Posted to user@spark.apache.org by Mohammad Tariq <do...@gmail.com> on 2016/07/28 11:45:09 UTC

Is spark-1.6.1-bin-2.6.0 compatible with hive-1.1.0-cdh5.7.1

Could anyone please help me with this? I have been using the same version
of Spark with CDH-5.4.5 successfully so far. However, after a recent CDH
upgrade, I'm not able to run the same Spark SQL module against
hive-1.1.0-cdh5.7.1.

When I try to run my program, Spark tries to connect to a local Derby Hive
metastore instead of the configured MySQL metastore. I have all the
required jars, along with hive-site.xml, in place though; there is no
change in the setup.
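
For context, the HiveContext is created along these lines (a simplified sketch of the Bootstrap.getHiveContext call that appears in the trace below, not the exact code; the app name is a placeholder):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Simplified sketch of Bootstrap.getHiveContext; the app name is a placeholder.
val sc = new SparkContext(new SparkConf().setAppName("replicator"))
val hiveContext = new HiveContext(sc)
// With hive-site.xml visible to the driver, javax.jdo.option.ConnectionURL
// should point at the MySQL metastore; a jdbc:derby:... fallback means the
// file isn't being picked up.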

This is the exception I'm getting:

[2016-07-28 04:36:01,207] INFO Initializing execution hive, version 1.2.1 (org.apache.spark.sql.hive.HiveContext:58)
[2016-07-28 04:36:01,231] INFO Inspected Hadoop version: 2.6.0-cdh5.7.1 (org.apache.spark.sql.hive.client.ClientWrapper:58)
[2016-07-28 04:36:01,232] INFO Loaded org.apache.hadoop.hive.shims.Hadoop23Shims for Hadoop version 2.6.0-cdh5.7.1 (org.apache.spark.sql.hive.client.ClientWrapper:58)
[2016-07-28 04:36:01,520] INFO 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore (org.apache.hadoop.hive.metastore.HiveMetaStore:638)
[2016-07-28 04:36:01,548] INFO ObjectStore, initialize called (org.apache.hadoop.hive.metastore.ObjectStore:332)
[2016-07-28 04:36:01,814] INFO Property hive.metastore.integral.jdo.pushdown unknown - will be ignored (DataNucleus.Persistence:77)
[2016-07-28 04:36:01,815] INFO Property datanucleus.cache.level2 unknown - will be ignored (DataNucleus.Persistence:77)

[2016-07-28 04:36:02,417] WARN Retrying creating default database after error: Unexpected exception caught. (org.apache.hadoop.hive.metastore.HiveMetaStore:671)
javax.jdo.JDOFatalInternalException: Unexpected exception caught.
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:410)
at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:439)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:334)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:290)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:642)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:620)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:669)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:478)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5903)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:198)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1491)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:67)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2935)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2954)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513)
at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:204)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:238)
at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:218)
at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:208)
at org.apache.spark.sql.hive.HiveContext.functionRegistry$lzycompute(HiveContext.scala:462)
at org.apache.spark.sql.hive.HiveContext.functionRegistry(HiveContext.scala:461)
at org.apache.spark.sql.UDFRegistration.<init>(UDFRegistration.scala:40)
at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:330)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:97)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.getHiveContext(Bootstrap.scala:47)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.main(Bootstrap.scala:125)
at com.intuit.blink.replicator.bootstrap.Bootstrap.main(Bootstrap.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
NestedThrowablesStackTrace:
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
at java.security.AccessController.doPrivileged(Native Method)
at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:410)
at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:439)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:334)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:290)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:642)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:620)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:669)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:478)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5903)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:198)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1491)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:67)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2935)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2954)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513)
at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:204)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:238)
at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:218)
at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:208)
at org.apache.spark.sql.hive.HiveContext.functionRegistry$lzycompute(HiveContext.scala:462)
at org.apache.spark.sql.hive.HiveContext.functionRegistry(HiveContext.scala:461)
at org.apache.spark.sql.UDFRegistration.<init>(UDFRegistration.scala:40)
at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:330)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:97)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.getHiveContext(Bootstrap.scala:47)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.main(Bootstrap.scala:125)
at com.intuit.blink.replicator.bootstrap.Bootstrap.main(Bootstrap.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.derby.jdbc.AutoloadedDriver40
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at java.sql.DriverManager.isDriverAllowed(DriverManager.java:556)
at java.sql.DriverManager.getConnection(DriverManager.java:661)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:349)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
at org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
... 59 more

[2016-07-28 04:36:02,428] INFO 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore (org.apache.hadoop.hive.metastore.HiveMetaStore:638)
[2016-07-28 04:36:02,431] INFO ObjectStore, initialize called (org.apache.hadoop.hive.metastore.ObjectStore:332)
[2016-07-28 04:36:02,483] INFO Property hive.metastore.integral.jdo.pushdown unknown - will be ignored (DataNucleus.Persistence:77)
[2016-07-28 04:36:02,483] INFO Property datanucleus.cache.level2 unknown - will be ignored (DataNucleus.Persistence:77)

Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:540)
at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:204)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:238)
at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:218)
at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:208)
at org.apache.spark.sql.hive.HiveContext.functionRegistry$lzycompute(HiveContext.scala:462)
at org.apache.spark.sql.hive.HiveContext.functionRegistry(HiveContext.scala:461)
at org.apache.spark.sql.UDFRegistration.<init>(UDFRegistration.scala:40)
at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:330)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:97)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.getHiveContext(Bootstrap.scala:47)
at com.intuit.blink.replicator.bootstrap.Bootstrap$.main(Bootstrap.scala:125)
at com.intuit.blink.replicator.bootstrap.Bootstrap.main(Bootstrap.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1493)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:67)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2935)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2954)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513)
... 22 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1491)
... 27 more
Caused by: javax.jdo.JDOFatalInternalException: Unexpected exception caught.
NestedThrowables:
java.lang.reflect.InvocationTargetException
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:410)
at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:439)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:334)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:290)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:642)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:620)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:673)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:478)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5903)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:198)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
... 32 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
at java.security.AccessController.doPrivileged(Native Method)
at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
... 51 more
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.derby.jdbc.AutoloadedDriver40
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at java.sql.DriverManager.isDriverAllowed(DriverManager.java:556)
at java.sql.DriverManager.getConnection(DriverManager.java:661)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:349)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
at org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
... 59 more


Tariq, Mohammad
about.me/mti

Re: Is spark-1.6.1-bin-2.6.0 compatible with hive-1.1.0-cdh5.7.1

Posted by Mich Talebzadeh <mi...@gmail.com>.
OK, does it create a Derby database and come back to the prompt? For
example, does spark-sql work OK?

If it cannot find the metastore, it will create an empty Derby database in
the same directory, and at the prompt you can type show databases; that
will only show default!
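
Something like this from spark-shell shows it quickly (a rough sketch, assuming Spark 1.6 as in your trace):

// In spark-shell, check what the metastore actually contains; against a
// freshly created, empty Derby metastore this prints only "default".
val hc = new org.apache.spark.sql.hive.HiveContext(sc)
hc.sql("show databases").collect().foreach(println)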

HTH

Dr Mich Talebzadeh



LinkedIn: https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw



http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any
loss, damage or destruction of data or any other property which may arise
from relying on this email's technical content is explicitly disclaimed.
The author will in no case be liable for any monetary damages arising from
such loss, damage or destruction.



On 28 July 2016 at 13:10, Mohammad Tariq <do...@gmail.com> wrote:

> Hi Mich,
>
> Thank you so much for the prompt response!
>
> I do have a copy of hive-site.xml in Spark's conf directory.
>
>
> On Thursday, July 28, 2016, Mich Talebzadeh <mi...@gmail.com>
> wrote:
>
>> Hi,
>>
>> This line
>>
>> [2016-07-28 04:36:01,814] INFO Property hive.metastore.integral.jdo.pushdown unknown - will be ignored (DataNucleus.Persistence:77)
>>
>> tells me that you don't seem to have the softlink to hive-site.xml
>> in $SPARK_HOME/conf:
>>
>> hive-site.xml -> /usr/lib/hive/conf/hive-site.xml
>>
>> I suggest you check that. That is the reason it cannot find your Hive
>> metastore.
>>
>>
>>
>> HTH
>>
>> Dr Mich Talebzadeh
>>
>>
>>
>> LinkedIn: https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw
>>
>>
>>
>> http://talebzadehmich.wordpress.com
>>
>>
>> Disclaimer: Use it at your own risk. Any and all responsibility for
>> any loss, damage or destruction of data or any other property which may
>> arise from relying on this email's technical content is explicitly
>> disclaimed. The author will in no case be liable for any monetary damages
>> arising from such loss, damage or destruction.

Re: Is spark-1.6.1-bin-2.6.0 compatible with hive-1.1.0-cdh5.7.1

Posted by Mohammad Tariq <do...@gmail.com>.
Hi Mich,

Thank you so much for the prompt response!

I do have a copy of hive-site.xml in Spark's conf directory.
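
Just to double-check that the file is actually visible to the driver, I can print its classpath URL (a quick sketch; null here would explain the Derby fallback):

// If hive-site.xml is on the driver classpath this prints its URL;
// null means Spark can't see it and will fall back to embedded Derby.
println(getClass.getClassLoader.getResource("hive-site.xml"))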

On Thursday, July 28, 2016, Mich Talebzadeh <mi...@gmail.com>
wrote:

> Hi,
>
> This line
>
> [2016-07-28 04:36:01,814] INFO Property hive.metastore.integral.jdo.pushdown unknown - will be ignored (DataNucleus.Persistence:77)
>
> tells me that you don't seem to have the softlink to hive-site.xml
> in $SPARK_HOME/conf:
>
> hive-site.xml -> /usr/lib/hive/conf/hive-site.xml
>
> I suggest you check that. That is the reason it cannot find your Hive
> metastore.
>
>
>
> HTH
>
> Dr Mich Talebzadeh
>
>
>
> LinkedIn: https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw
>
>
>
> http://talebzadehmich.wordpress.com
>
>
> Disclaimer: Use it at your own risk. Any and all responsibility for any
> loss, damage or destruction of data or any other property which may arise
> from relying on this email's technical content is explicitly disclaimed.
> The author will in no case be liable for any monetary damages arising from
> such loss, damage or destruction.
>>
>> at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>>
>> Caused by: java.lang.NoClassDefFoundError: Could not initialize class
>> org.apache.derby.jdbc.AutoloadedDriver40
>>
>> at java.lang.Class.forName0(Native Method)
>>
>> at java.lang.Class.forName(Class.java:348)
>>
>> at java.sql.DriverManager.isDriverAllowed(DriverManager.java:556)
>>
>> at java.sql.DriverManager.getConnection(DriverManager.java:661)
>>
>> at java.sql.DriverManager.getConnection(DriverManager.java:208)
>>
>> at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:349)
>>
>> at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
>>
>> at
>> com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
>>
>> at
>> org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
>>
>> at
>> org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
>>
>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>
>> at
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>
>> at
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>
>> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>
>> at
>> org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
>>
>> at
>> org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
>>
>> at
>> org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
>>
>> at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
>>
>> ... 59 more
>>
>> [2016-07-28 04:36:02,428] INFO 0: Opening raw store with implemenation
>> class:org.apache.hadoop.hive.metastore.ObjectStore
>> (org.apache.hadoop.hive.metastore.HiveMetaStore:638)
>>
>> [2016-07-28 04:36:02,431] INFO ObjectStore, initialize called
>> (org.apache.hadoop.hive.metastore.ObjectStore:332)
>>
>> [2016-07-28 04:36:02,483] INFO Property
>> hive.metastore.integral.jdo.pushdown unknown - will be ignored
>> (DataNucleus.Persistence:77)
>>
>> [2016-07-28 04:36:02,483] INFO Property datanucleus.cache.level2 unknown
>> - will be ignored (DataNucleus.Persistence:77)
>>
>> Exception in thread "main" java.lang.RuntimeException:
>> java.lang.RuntimeException: Unable to instantiate
>> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
>>
>> at
>> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:540)
>>
>> at
>> org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:204)
>>
>> at
>> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:238)
>>
>> at
>> org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:218)
>>
>> at
>> org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:208)
>>
>> at
>> org.apache.spark.sql.hive.HiveContext.functionRegistry$lzycompute(HiveContext.scala:462)
>>
>> at
>> org.apache.spark.sql.hive.HiveContext.functionRegistry(HiveContext.scala:461)
>>
>> at org.apache.spark.sql.UDFRegistration.<init>(UDFRegistration.scala:40)
>>
>> at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:330)
>>
>> at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:97)
>>
>> at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
>>
>> at
>> com.intuit.blink.replicator.bootstrap.Bootstrap$.getHiveContext(Bootstrap.scala:47)
>>
>> at
>> com.intuit.blink.replicator.bootstrap.Bootstrap$.main(Bootstrap.scala:125)
>>
>> at com.intuit.blink.replicator.bootstrap.Bootstrap.main(Bootstrap.scala)
>>
>> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>
>> at
>> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>
>> at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>
>> at java.lang.reflect.Method.invoke(Method.java:498)
>>
>> at
>> org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
>>
>> at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
>>
>> at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
>>
>> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
>>
>> at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>>
>> Caused by: java.lang.RuntimeException: Unable to instantiate
>> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
>>
>> at
>> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1493)
>>
>> at
>> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:67)
>>
>> at
>> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)
>>
>> at
>> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2935)
>>
>> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2954)
>>
>> at
>> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513)
>>
>> ... 22 more
>>
>> Caused by: java.lang.reflect.InvocationTargetException
>>
>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>
>> at
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>
>> at
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>
>> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>
>> at
>> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1491)
>>
>> ... 27 more
>>
>> Caused by: javax.jdo.JDOFatalInternalException: Unexpected exception
>> caught.
>>
>> NestedThrowables:
>>
>> java.lang.reflect.InvocationTargetException
>>
>> at
>> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193)
>>
>> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
>>
>> at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
>>
>> at
>> org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:410)
>>
>> at
>> org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:439)
>>
>> at
>> org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:334)
>>
>> at
>> org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:290)
>>
>> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
>>
>> at
>> org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
>>
>> at
>> org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
>>
>> at
>> org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:642)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:620)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:673)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:478)
>>
>> at
>> org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)
>>
>> at
>> org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5903)
>>
>> at
>> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:198)
>>
>> at
>> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
>>
>> ... 32 more
>>
>> Caused by: java.lang.reflect.InvocationTargetException
>>
>> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>
>> at
>> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>
>> at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>
>> at java.lang.reflect.Method.invoke(Method.java:498)
>>
>> at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
>>
>> at java.security.AccessController.doPrivileged(Native Method)
>>
>> at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
>>
>> at
>> javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
>>
>> ... 51 more
>>
>> Caused by: java.lang.NoClassDefFoundError: Could not initialize class
>> org.apache.derby.jdbc.AutoloadedDriver40
>>
>> at java.lang.Class.forName0(Native Method)
>>
>> at java.lang.Class.forName(Class.java:348)
>>
>> at java.sql.DriverManager.isDriverAllowed(DriverManager.java:556)
>>
>> at java.sql.DriverManager.getConnection(DriverManager.java:661)
>>
>> at java.sql.DriverManager.getConnection(DriverManager.java:208)
>>
>> at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:349)
>>
>> at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
>>
>> at
>> com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
>>
>> at
>> org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
>>
>> at
>> org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
>>
>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>
>> at
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>
>> at
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>
>> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>
>> at
>> org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
>>
>> at
>> org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
>>
>> at
>> org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
>>
>> at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
>>
>> at
>> org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
>>
>> ... 59 more
>>
>>
>> [image: http://]
>>
>> Tariq, Mohammad
>> about.me/mti
>> [image: http://]
>> <http://about.me/mti>
>>
>>
>
>

-- 


Tariq, Mohammad
about.me/mti

Re: Is spark-1.6.1-bin-2.6.0 compatible with hive-1.1.0-cdh5.7.1

Posted by Mich Talebzadeh <mi...@gmail.com>.
Hi,

This line

[2016-07-28 04:36:01,814] INFO Property hive.metastore.integral.jdo.pushdown
unknown - will be ignored (DataNucleus.Persistence:77)

tells me that you don't seem to have the softlink to hive-site.xml in
$SPARK_HOME/conf:

hive-site.xml -> /usr/lib/hive/conf/hive-site.xml

I suggest you check that. That is the reason it cannot find your Hive
metastore and falls back to the local Derby one.
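
If the link is missing, creating it and re-checking the metastore settings
usually does the trick. A minimal sketch, assuming your Hive config lives
under /usr/lib/hive/conf (on parcel-based CDH installs it may be
/etc/hive/conf instead -- adjust the path for your layout):

  ln -s /usr/lib/hive/conf/hive-site.xml $SPARK_HOME/conf/hive-site.xml

  # confirm the file points at your MySQL metastore, not Derby:
  # the value should be a jdbc:mysql URL, not jdbc:derby
  grep -A2 'javax.jdo.option.ConnectionURL' $SPARK_HOME/conf/hive-site.xml

Alternatively you can ship the file along with the job, e.g.

  spark-submit --files /usr/lib/hive/conf/hive-site.xml ...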



HTH

Dr Mich Talebzadeh



LinkedIn: https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw



http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any
loss, damage or destruction of data or any other property which may arise
from relying on this email's technical content is explicitly disclaimed.
The author will in no case be liable for any monetary damages arising from
such loss, damage or destruction.


