You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@phoenix.apache.org by junaid khalid <ju...@platalytics.com> on 2015/03/16 09:50:57 UTC

Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

I have a Spark program which connects to HBase using Phoenix and
upserts records into an HBase table. It runs fine when run through the
spark-submit command and works as expected. But when I run it through Oozie,
it gives the following exception. When submitting through Oozie, if Spark is
run in local mode the program works fine.

I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.

---
15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
row \x00\x00TABLE55068E2AED4AB9B607BBBE49
java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:188)
at
org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
at
org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
at
org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
at
org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
at
org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
at
org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
at
org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
at
org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
at
org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
at org.apache.spark.scheduler.Task.run(Task.scala:56)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
... 3 more
java.sql.SQLException: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
at
org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
at
org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
at
org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
at
org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
at
org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
at
org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
at
org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
at
com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
at org.apache.spark.scheduler.Task.run(Task.scala:56)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
... 3 more

Re: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by James Taylor <ja...@apache.org>.
Are you familiar with this project:
https://github.com/simplymeasured/phoenix-spark/
<https://github.com/simplymeasured/phoenix-spark/pull/2> ?

On Thursday, March 19, 2015, junaid khalid <ju...@platalytics.com>
wrote:

> i tried setting hbase-protocol to the SPARK_CLASSPATH but that didnt
> worked for me. I can see that jar in environment of spark executor ui, but
> that doesnt solve the problem. Also note that while submitting via
> spark-submit phoenix works fine with spark.
>
> i couldn't find  phoenix-core version  4.2.3 on maven or any where .
> Is phoenix-core is equivalent to phoenix-client.jar which I got while
> building phoenix?
>
> On Thu, Mar 19, 2015 at 2:59 AM, Samarth Jain <samarth@apache.org
> <javascript:_e(%7B%7D,'cvml','samarth@apache.org');>> wrote:
>
>> You also want to make sure that you are using compatible versions of
>> client and server jars:
>>
>> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3 are
>> *NOT* compatible.
>>
>> The server side jar version should always be the same or newer (in
>> version) than the client side jar. In general we support backward
>> compatibility between point releases as long as the server side jars are
>> upgraded before the client.
>>
>> On Wed, Mar 18, 2015 at 2:33 PM, Josh Mahonin <jmahonin@interset.com
>> <javascript:_e(%7B%7D,'cvml','jmahonin@interset.com');>> wrote:
>>
>>> Have you tried adding hbase-protocol to the SPARK_CLASSPATH? That worked
>>> for me to get Spark playing nicely with Phoenix.
>>>
>>> On Tue, Mar 17, 2015 at 6:15 PM, Andrew Purtell <apurtell@apache.org
>>> <javascript:_e(%7B%7D,'cvml','apurtell@apache.org');>> wrote:
>>>
>>>> This is HBASE-11118 (https://issues.apache.org/jira/browse/HBASE-11118).
>>>> Looks like someone else wrote in that Oozie wasn't working for them. You
>>>> should follow up on the HBase issue tracker, although no promises, it may
>>>> be an Oozie problem, but this is not a Phoenix issue.
>>>>
>>>> On Mon, Mar 16, 2015 at 2:39 AM, junaid khalid <
>>>> junaid.khalid@platalytics.com
>>>> <javascript:_e(%7B%7D,'cvml','junaid.khalid@platalytics.com');>> wrote:
>>>>
>>>>> hbase-protocol.jar is added to path. I can see that in spark-UI of the
>>>>> running application.
>>>>>
>>>>> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.
>>>>>
>>>>> On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <sunfl@certusnet.com.cn
>>>>> <javascript:_e(%7B%7D,'cvml','sunfl@certusnet.com.cn');>> wrote:
>>>>>
>>>>>> Hi,
>>>>>> Did you add hbase-protocol.jar into your application classpath?
>>>>>> Do you find some version incompatibility between your client and
>>>>>> server?
>>>>>>
>>>>>> Thanks,
>>>>>> Sun.
>>>>>>
>>>>>> ------------------------------
>>>>>> ------------------------------
>>>>>>
>>>>>> CertusNet
>>>>>>
>>>>>>
>>>>>> *From:* junaid khalid
>>>>>> <javascript:_e(%7B%7D,'cvml','junaid.khalid@platalytics.com');>
>>>>>> *Date:* 2015-03-16 16:50
>>>>>> *To:* user <javascript:_e(%7B%7D,'cvml','user@phoenix.apache.org');>
>>>>>> *Subject:* Fwd: java.lang.IllegalAccessError:
>>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>>> i have a spark program in which it connects to hbase using phoenix
>>>>>> and upserts record in hbase table. It runs fine when run through
>>>>>> spark-submit command and work as expected. But when I run it through oozie,
>>>>>> it gives following exception. While running submitting through oozie, if
>>>>>> spark is run in local mode the program works fine.
>>>>>>
>>>>>> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>>>>>>
>>>>>> ---
>>>>>> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
>>>>>> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
>>>>>> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
>>>>>> java.util.concurrent.ExecutionException:
>>>>>> java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
>>>>>> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>>>>>> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
>>>>>> at
>>>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
>>>>>> at
>>>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>>>> at
>>>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>>>> at
>>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>>> at
>>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>>> at
>>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>>> at
>>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>>>> at
>>>>>> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>>>> at
>>>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>>>> at
>>>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>>>> at java.lang.Thread.run(Thread.java:745)
>>>>>> Caused by: java.lang.IllegalAccessError:
>>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>>>> ... 3 more
>>>>>> java.sql.SQLException: java.lang.IllegalAccessError:
>>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>>>> at
>>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>>>> at
>>>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>>>> at
>>>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>>>> at
>>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>>>> at
>>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>>>> at
>>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>>> at
>>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>>> at
>>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>>> at
>>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>>>> at
>>>>>> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>>>> at
>>>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>>>> at
>>>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>>>> at java.lang.Thread.run(Thread.java:745)
>>>>>> Caused by: java.lang.IllegalAccessError:
>>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>>>> at
>>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>>>> ... 3 more
>>>>>>
>>>>>>
>>>>>
>>>>
>>>>
>>>> --
>>>> Best regards,
>>>>
>>>>    - Andy
>>>>
>>>> Problems worthy of attack prove their worth by hitting back. - Piet
>>>> Hein (via Tom White)
>>>>
>>>
>>>
>>
>

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by junaid khalid <ju...@platalytics.com>.
I tried adding hbase-protocol to the SPARK_CLASSPATH but that didn't work
for me. I can see that jar in the environment of the Spark executor UI, but that
doesn't solve the problem. Also note that when submitting via spark-submit,
Phoenix works fine with Spark.

I couldn't find phoenix-core version 4.2.3 on Maven or anywhere.
Is phoenix-core equivalent to the phoenix-client.jar which I got while
building Phoenix?

On Thu, Mar 19, 2015 at 2:59 AM, Samarth Jain <sa...@apache.org> wrote:

> You also want to make sure that you are using compatible versions of
> client and server jars:
>
> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3 are
> *NOT* compatible.
>
> The server side jar version should always be the same or newer (in
> version) than the client side jar. In general we support backward
> compatibility between point releases as long as the server side jars are
> upgraded before the client.
>
> On Wed, Mar 18, 2015 at 2:33 PM, Josh Mahonin <jm...@interset.com>
> wrote:
>
>> Have you tried adding hbase-protocol to the SPARK_CLASSPATH? That worked
>> for me to get Spark playing nicely with Phoenix.
>>
>> On Tue, Mar 17, 2015 at 6:15 PM, Andrew Purtell <ap...@apache.org>
>> wrote:
>>
>>> This is HBASE-11118 (https://issues.apache.org/jira/browse/HBASE-11118).
>>> Looks like someone else wrote in that Oozie wasn't working for them. You
>>> should follow up on the HBase issue tracker, although no promises, it may
>>> be an Oozie problem, but this is not a Phoenix issue.
>>>
>>> On Mon, Mar 16, 2015 at 2:39 AM, junaid khalid <
>>> junaid.khalid@platalytics.com> wrote:
>>>
>>>> hbase-protocol.jar is added to path. I can see that in spark-UI of the
>>>> running application.
>>>>
>>>> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.
>>>>
>>>> On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <su...@certusnet.com.cn>
>>>> wrote:
>>>>
>>>>> Hi,
>>>>> Did you add hbase-protocol.jar into your application classpath?
>>>>> Do you find some version incompatibility between your client and
>>>>> server?
>>>>>
>>>>> Thanks,
>>>>> Sun.
>>>>>
>>>>> ------------------------------
>>>>> ------------------------------
>>>>>
>>>>> CertusNet
>>>>>
>>>>>
>>>>> *From:* junaid khalid <ju...@platalytics.com>
>>>>> *Date:* 2015-03-16 16:50
>>>>> *To:* user <us...@phoenix.apache.org>
>>>>> *Subject:* Fwd: java.lang.IllegalAccessError:
>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>> i have a spark program in which it connects to hbase using phoenix and
>>>>> upserts record in hbase table. It runs fine when run through spark-submit
>>>>> command and work as expected. But when I run it through oozie, it gives
>>>>> following exception. While running submitting through oozie, if spark is
>>>>> run in local mode the program works fine.
>>>>>
>>>>> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>>>>>
>>>>> ---
>>>>> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
>>>>> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
>>>>> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
>>>>> java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>>>>> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
>>>>> at
>>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
>>>>> at
>>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>>> at
>>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>>> at
>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>> at
>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>> at
>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>> at
>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>>> at
>>>>> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>>> at
>>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>>> at
>>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>>> at java.lang.Thread.run(Thread.java:745)
>>>>> Caused by: java.lang.IllegalAccessError:
>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>>> ... 3 more
>>>>> java.sql.SQLException: java.lang.IllegalAccessError:
>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>>> at
>>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>>> at
>>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>>> at
>>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>>> at
>>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>>> at
>>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>>> at
>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>> at
>>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>>> at
>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>> at
>>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>>> at
>>>>> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>>> at
>>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>>> at
>>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>>> at java.lang.Thread.run(Thread.java:745)
>>>>> Caused by: java.lang.IllegalAccessError:
>>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>>> at
>>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>>> ... 3 more
>>>>>
>>>>>
>>>>
>>>
>>>
>>> --
>>> Best regards,
>>>
>>>    - Andy
>>>
>>> Problems worthy of attack prove their worth by hitting back. - Piet Hein
>>> (via Tom White)
>>>
>>
>>
>

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by Samarth Jain <sa...@apache.org>.
You also want to make sure that you are using compatible versions of client
and server jars:

phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3 are
*NOT* compatible.

The server side jar version should always be the same or newer (in version)
than the client side jar. In general we support backward compatibility
between point releases as long as the server side jars are upgraded before
the client.

On Wed, Mar 18, 2015 at 2:33 PM, Josh Mahonin <jm...@interset.com> wrote:

> Have you tried adding hbase-protocol to the SPARK_CLASSPATH? That worked
> for me to get Spark playing nicely with Phoenix.
>
> On Tue, Mar 17, 2015 at 6:15 PM, Andrew Purtell <ap...@apache.org>
> wrote:
>
>> This is HBASE-11118 (https://issues.apache.org/jira/browse/HBASE-11118).
>> Looks like someone else wrote in that Oozie wasn't working for them. You
>> should follow up on the HBase issue tracker, although no promises, it may
>> be an Oozie problem, but this is not a Phoenix issue.
>>
>> On Mon, Mar 16, 2015 at 2:39 AM, junaid khalid <
>> junaid.khalid@platalytics.com> wrote:
>>
>>> hbase-protocol.jar is added to path. I can see that in spark-UI of the
>>> running application.
>>>
>>> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.
>>>
>>> On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <su...@certusnet.com.cn>
>>> wrote:
>>>
>>>> Hi,
>>>> Did you add hbase-protocol.jar into your application classpath?
>>>> Do you find some version incompatibility between your client and
>>>> server?
>>>>
>>>> Thanks,
>>>> Sun.
>>>>
>>>> ------------------------------
>>>> ------------------------------
>>>>
>>>> CertusNet
>>>>
>>>>
>>>> *From:* junaid khalid <ju...@platalytics.com>
>>>> *Date:* 2015-03-16 16:50
>>>> *To:* user <us...@phoenix.apache.org>
>>>> *Subject:* Fwd: java.lang.IllegalAccessError:
>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>> I have a Spark program which connects to HBase using Phoenix and
>>>> upserts records in an HBase table. It runs fine when run through the
>>>> spark-submit command and works as expected. But when I run it through
>>>> Oozie, it gives the following exception. When submitting through Oozie,
>>>> if Spark is run in local mode the program works fine.
>>>>
>>>> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>>>>
>>>> ---
>>>> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
>>>> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
>>>> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
>>>> java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>>>> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
>>>> at
>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
>>>> at
>>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>> at
>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>> at
>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>> at
>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>> at
>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>> at
>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>> at
>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>> at
>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>> at java.lang.Thread.run(Thread.java:745)
>>>> Caused by: java.lang.IllegalAccessError:
>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>> ... 3 more
>>>> java.sql.SQLException: java.lang.IllegalAccessError:
>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>>> at
>>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>>> at
>>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>>> at
>>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>>> at
>>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>>> at
>>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>>> at
>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>> at
>>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>>> at
>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>> at
>>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>>> at
>>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>>> at
>>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>>> at java.lang.Thread.run(Thread.java:745)
>>>> Caused by: java.lang.IllegalAccessError:
>>>> com/google/protobuf/HBaseZeroCopyByteString
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>>> at
>>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>>> ... 3 more
>>>>
>>>>
>>>
>>
>>
>> --
>> Best regards,
>>
>>    - Andy
>>
>> Problems worthy of attack prove their worth by hitting back. - Piet Hein
>> (via Tom White)
>>
>
>

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by Josh Mahonin <jm...@interset.com>.
Have you tried adding hbase-protocol to the SPARK_CLASSPATH? That worked
for me to get Spark playing nicely with Phoenix.

On Tue, Mar 17, 2015 at 6:15 PM, Andrew Purtell <ap...@apache.org> wrote:

> This is HBASE-11118 (https://issues.apache.org/jira/browse/HBASE-11118).
> Looks like someone else wrote in that Oozie wasn't working for them. You
> should follow up on the HBase issue tracker, although no promises, it may
> be an Oozie problem, but this is not a Phoenix issue.
>
> On Mon, Mar 16, 2015 at 2:39 AM, junaid khalid <
> junaid.khalid@platalytics.com> wrote:
>
>> hbase-protocol.jar is added to path. I can see that in spark-UI of the
>> running application.
>>
>> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.
>>
>> On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <su...@certusnet.com.cn>
>> wrote:
>>
>>> Hi,
>>> Did you add hbase-protocol.jar into your application classpath?
>>> Do you find some version incompatibility between your client and server?
>>>
>>> Thanks,
>>> Sun.
>>>
>>> ------------------------------
>>> ------------------------------
>>>
>>> CertusNet
>>>
>>>
>>> *From:* junaid khalid <ju...@platalytics.com>
>>> *Date:* 2015-03-16 16:50
>>> *To:* user <us...@phoenix.apache.org>
>>> *Subject:* Fwd: java.lang.IllegalAccessError:
>>> com/google/protobuf/HBaseZeroCopyByteString
>>> I have a Spark program which connects to HBase using Phoenix and
>>> upserts records in an HBase table. It runs fine when run through the
>>> spark-submit command and works as expected. But when I run it through
>>> Oozie, it gives the following exception. When submitting through Oozie,
>>> if Spark is run in local mode the program works fine.
>>>
>>> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>>>
>>> ---
>>> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
>>> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
>>> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
>>> java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
>>> com/google/protobuf/HBaseZeroCopyByteString
>>> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>>> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
>>> at
>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
>>> at
>>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>> at
>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>> at
>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>> at
>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>> at
>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>> at
>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>> at
>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>> at
>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>> at
>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>> at java.lang.Thread.run(Thread.java:745)
>>> Caused by: java.lang.IllegalAccessError:
>>> com/google/protobuf/HBaseZeroCopyByteString
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>> ... 3 more
>>> java.sql.SQLException: java.lang.IllegalAccessError:
>>> com/google/protobuf/HBaseZeroCopyByteString
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>>> at
>>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>>> at
>>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>>> at
>>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>>> at
>>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>>> at
>>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>>> at
>>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>>> at
>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>> at
>>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>>> at
>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>> at
>>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>>> at
>>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>>> at
>>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>>> at java.lang.Thread.run(Thread.java:745)
>>> Caused by: java.lang.IllegalAccessError:
>>> com/google/protobuf/HBaseZeroCopyByteString
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>>> at
>>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>>> ... 3 more
>>>
>>>
>>
>
>
> --
> Best regards,
>
>    - Andy
>
> Problems worthy of attack prove their worth by hitting back. - Piet Hein
> (via Tom White)
>

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by Andrew Purtell <ap...@apache.org>.
This is HBASE-11118 (https://issues.apache.org/jira/browse/HBASE-11118).
Looks like someone else wrote in that Oozie wasn't working for them. You
should follow up on the HBase issue tracker, although no promises, it may
be an Oozie problem, but this is not a Phoenix issue.

On Mon, Mar 16, 2015 at 2:39 AM, junaid khalid <
junaid.khalid@platalytics.com> wrote:

> hbase-protocol.jar is added to path. I can see that in spark-UI of the
> running application.
>
> phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.
>
> On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <su...@certusnet.com.cn> wrote:
>
>> Hi,
>> Did you add hbase-protocol.jar into your application classpath?
>> Do you find some version incompatibility between your client and server?
>>
>> Thanks,
>> Sun.
>>
>> ------------------------------
>> ------------------------------
>>
>> CertusNet
>>
>>
>> *From:* junaid khalid <ju...@platalytics.com>
>> *Date:* 2015-03-16 16:50
>> *To:* user <us...@phoenix.apache.org>
>> *Subject:* Fwd: java.lang.IllegalAccessError:
>> com/google/protobuf/HBaseZeroCopyByteString
>> I have a Spark program which connects to HBase using Phoenix and
>> upserts records in an HBase table. It runs fine when run through the
>> spark-submit command and works as expected. But when I run it through
>> Oozie, it gives the following exception. When submitting through Oozie,
>> if Spark is run in local mode the program works fine.
>>
>> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>>
>> ---
>> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
>> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
>> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
>> java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
>> com/google/protobuf/HBaseZeroCopyByteString
>> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
>> at
>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
>> at
>> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>> at
>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>> at
>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>> at
>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>> at
>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>> at
>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>> at
>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>> at
>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>> at
>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>> at
>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.lang.IllegalAccessError:
>> com/google/protobuf/HBaseZeroCopyByteString
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>> ... 3 more
>> java.sql.SQLException: java.lang.IllegalAccessError:
>> com/google/protobuf/HBaseZeroCopyByteString
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
>> at
>> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
>> at
>> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
>> at
>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
>> at
>> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
>> at
>> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
>> at
>> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
>> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
>> at
>> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
>> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
>> at
>> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
>> at
>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>> at
>> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
>> at
>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>> at
>> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
>> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>> at org.apache.spark.scheduler.Task.run(Task.scala:56)
>> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
>> at
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>> at
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>> at java.lang.Thread.run(Thread.java:745)
>> Caused by: java.lang.IllegalAccessError:
>> com/google/protobuf/HBaseZeroCopyByteString
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
>> at
>> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
>> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
>> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>> ... 3 more
>>
>>
>


-- 
Best regards,

   - Andy

Problems worthy of attack prove their worth by hitting back. - Piet Hein
(via Tom White)

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by junaid khalid <ju...@platalytics.com>.
hbase-protocol.jar is added to the classpath. I can see that in the Spark UI
of the running application.

phoenix-core version is 4.3.0 and phoenix-server.jar version is 4.2.3.

On Mon, Mar 16, 2015 at 2:15 PM, Fulin Sun <su...@certusnet.com.cn> wrote:

> Hi,
> Did you add hbase-protocol.jar into your application classpath?
> Do you find some version incompatibility between your client and server?
>
> Thanks,
> Sun.
>
> ------------------------------
> ------------------------------
>
> CertusNet
>
>
> *From:* junaid khalid <ju...@platalytics.com>
> *Date:* 2015-03-16 16:50
> *To:* user <us...@phoenix.apache.org>
> *Subject:* Fwd: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> I have a Spark program which connects to HBase using Phoenix and
> upserts records in an HBase table. It runs fine when run through the
> spark-submit command and works as expected. But when I run it through
> Oozie, it gives the following exception. When submitting through Oozie,
> if Spark is run in local mode the program works fine.
>
> I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.
>
> ---
> 15/03/16 13:13:18 WARN HTable: Error calling coprocessor service
> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
> row \x00\x00TABLE55068E2AED4AB9B607BBBE49
> java.util.concurrent.ExecutionException: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
> at java.util.concurrent.FutureTask.get(FutureTask.java:188)
> at
> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
> at
> org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
> at
> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
> at
> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
> at
> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
> at
> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
> at
> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at
> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
> at
> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
> at
> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
> at
> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
> at org.apache.spark.scheduler.Task.run(Task.scala:56)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
> ... 3 more
> java.sql.SQLException: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
> at
> org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
> at
> org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
> at
> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
> at
> org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
> at
> org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
> at
> org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
> at
> org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at
> org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
> at
> org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
> at
> com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
> at
> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
> at
> org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
> at org.apache.spark.scheduler.Task.run(Task.scala:56)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
> at
> org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
> at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
> ... 3 more
>
>

Re: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString

Posted by Fulin Sun <su...@certusnet.com.cn>.
Hi,
Did you add hbase-protocol.jar to your application classpath?
Did you find any version incompatibility between your client and server?

Thanks,
Sun.





CertusNet 

From: junaid khalid
Date: 2015-03-16 16:50
To: user
Subject: Fwd: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
I have a spark program in which it connects to hbase using phoenix and upserts records in the hbase table. It runs fine when run through the spark-submit command and works as expected. But when I run it through oozie, it gives the following exception. When submitting through oozie, if spark is run in local mode the program works fine.

I am using spark 1.2, phoenix 4.2.3 and hbase 0.98.6-cdh5.3.1.

---
15/03/16 13:13:18 WARN HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00\x00TABLE55068E2AED4AB9B607BBBE49
java.util.concurrent.ExecutionException: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:188)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1583)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1540)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
at org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
at org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
at org.apache.spark.scheduler.Task.run(Task.scala:56)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
... 3 more
java.sql.SQLException: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1024)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:348)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:309)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:305)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:352)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:231)
at org.apache.phoenix.compile.FromCompiler.getResolverForMutation(FromCompiler.java:207)
at org.apache.phoenix.compile.UpsertCompiler.compile(UpsertCompiler.java:248)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:487)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableUpsertStatement.compilePlan(PhoenixStatement.java:478)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:279)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:272)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:270)
at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1052)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:194)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1$$anonfun$apply$1.apply(SmartSink.scala:175)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:175)
at com.origins.platform.connectors.smartSink.SmartSink$$anonfun$loadData$1.apply(SmartSink.scala:169)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:773)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
at org.apache.spark.scheduler.Task.run(Task.scala:56)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1265)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1571)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
... 3 more