You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@phoenix.apache.org by Pavani Addanki <pa...@gmail.com> on 2017/09/14 17:06:08 UTC
java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
Hi All,
I am facing this issue while trying to insert from Spark into Phoenix.
I tried adding protobuf-java-2.5.0.jar, hbase-protocol-0.98.9-hadoop2.jar,
and all the other dependencies to the classpath, but I am still unable to
resolve the error.
How can I resolve this issue?
Please find the complete error log below
17/09/14 11:56:47 ERROR SparkVerticaBase: Job aborted due to stage failure:
Task 2 in stage 40.0 failed 4 times, most recent failure: Lost task 2.3 in
stage 40.0 (TID 82, clpd862.sldc.sbc.com, executor 2):
java.lang.RuntimeException: Exception while committing to database.
at
org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:85)
at
org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:39)
at
org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply$mcV$sp(PairRDDFunctions.scala:1125)
at
org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply(PairRDDFunctions.scala:1123)
at
org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply(PairRDDFunctions.scala:1123)
at
org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1341)
at
org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1131)
at
org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1102)
at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.sql.SQLException: java.util.concurrent.ExecutionException:
java.lang.Exception: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.cache.ServerCacheClient.addServerCache(ServerCacheClient.java:266)
at
org.apache.phoenix.index.IndexMetaDataCacheClient.addIndexMetadataCache(IndexMetaDataCacheClient.java:78)
at
org.apache.phoenix.execute.MutationState.setMetaDataOnMutations(MutationState.java:1068)
at
org.apache.phoenix.execute.MutationState.send(MutationState.java:918)
at
org.apache.phoenix.execute.MutationState.send(MutationState.java:1317)
at
org.apache.phoenix.execute.MutationState.commit(MutationState.java:1149)
at
org.apache.phoenix.jdbc.PhoenixConnection$3.call(PhoenixConnection.java:520)
at
org.apache.phoenix.jdbc.PhoenixConnection$3.call(PhoenixConnection.java:517)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at
org.apache.phoenix.jdbc.PhoenixConnection.commit(PhoenixConnection.java:517)
at
org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:82)
... 13 more
Caused by: java.util.concurrent.ExecutionException: java.lang.Exception:
java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:206)
at
org.apache.phoenix.cache.ServerCacheClient.addServerCache(ServerCacheClient.java:258)
... 23 more
Caused by: java.lang.Exception: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.cache.ServerCacheClient$1.call(ServerCacheClient.java:226)
at
org.apache.phoenix.cache.ServerCacheClient$1.call(ServerCacheClient.java:185)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
org.apache.phoenix.job.JobManager$InstrumentedJobFutureTask.run(JobManager.java:183)
... 3 more
Caused by: java.lang.IllegalAccessError:
com/google/protobuf/HBaseZeroCopyByteString
at
org.apache.phoenix.cache.ServerCacheClient$1$1.call(ServerCacheClient.java:217)
at
org.apache.phoenix.cache.ServerCacheClient$1$1.call(ServerCacheClient.java:192)
at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1763)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
... 3 more
Driver stacktrace:
17/09/14 11:56:47 ERROR TransportRequestHandler: Error while invoking
RpcHandler#receive() for one-way message.
org.apache.spark.SparkException: Could not find CoarseGrainedScheduler.
at
org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:154)
at
org.apache.spark.rpc.netty.Dispatcher.postOneWayMessage(Dispatcher.scala:134)
at
org.apache.spark.rpc.netty.NettyRpcHandler.receive(NettyRpcEnv.scala:570)
at
org.apache.spark.network.server.TransportRequestHandler.processOneWayMessage(TransportRequestHandler.java:180)
at
org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:109)
at
org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
at
org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
at
io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
at
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
at
io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
at
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
at
io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
at
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
at
org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
at
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
at
io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
at
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
at
io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
at
io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
at
io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:652)
at
io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:575)
at
io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:489)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:451)
at
io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
at
io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
at java.lang.Thread.run(Thread.java:745)
Re: java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
Posted by Anoop John <an...@gmail.com>.
This sounds similar to issue HBASE-10304, but that has been fixed from 0.98.0
onwards. Please check whether your code includes this fix or not.
-Anoop-
On Thu, Sep 14, 2017 at 10:36 PM, Pavani Addanki
<pa...@gmail.com> wrote:
> Hi All,
>
> I am facing this issue while trying to insert from Spark into Phoenix.
>
> I tried adding protobuf-java-2.5.0.jar, hbase-protocol-0.98.9-hadoop2.jar,
> and all the other dependencies to the classpath, but I am still unable to
> resolve the error.
>
> How can I resolve this issue?
>
> Please find the complete error log below
>
>
>
> 17/09/14 11:56:47 ERROR SparkVerticaBase: Job aborted due to stage failure:
> Task 2 in stage 40.0 failed 4 times, most recent failure: Lost task 2.3 in
> stage 40.0 (TID 82, clpd862.sldc.sbc.com, executor 2):
> java.lang.RuntimeException: Exception while committing to database.
> at
> org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:85)
> at
> org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:39)
> at
> org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply$mcV$sp(PairRDDFunctions.scala:1125)
> at
> org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply(PairRDDFunctions.scala:1123)
> at
> org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12$$anonfun$apply$4.apply(PairRDDFunctions.scala:1123)
> at
> org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1341)
> at
> org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1131)
> at
> org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsNewAPIHadoopDataset$1$$anonfun$12.apply(PairRDDFunctions.scala:1102)
> at
> org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
> at org.apache.spark.scheduler.Task.run(Task.scala:99)
> at
> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.sql.SQLException: java.util.concurrent.ExecutionException:
> java.lang.Exception: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.cache.ServerCacheClient.addServerCache(ServerCacheClient.java:266)
> at
> org.apache.phoenix.index.IndexMetaDataCacheClient.addIndexMetadataCache(IndexMetaDataCacheClient.java:78)
> at
> org.apache.phoenix.execute.MutationState.setMetaDataOnMutations(MutationState.java:1068)
> at
> org.apache.phoenix.execute.MutationState.send(MutationState.java:918)
> at
> org.apache.phoenix.execute.MutationState.send(MutationState.java:1317)
> at
> org.apache.phoenix.execute.MutationState.commit(MutationState.java:1149)
> at
> org.apache.phoenix.jdbc.PhoenixConnection$3.call(PhoenixConnection.java:520)
> at
> org.apache.phoenix.jdbc.PhoenixConnection$3.call(PhoenixConnection.java:517)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at
> org.apache.phoenix.jdbc.PhoenixConnection.commit(PhoenixConnection.java:517)
> at
> org.apache.phoenix.mapreduce.PhoenixRecordWriter.write(PhoenixRecordWriter.java:82)
> ... 13 more
> Caused by: java.util.concurrent.ExecutionException: java.lang.Exception:
> java.lang.IllegalAccessError: com/google/protobuf/HBaseZeroCopyByteString
> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
> at java.util.concurrent.FutureTask.get(FutureTask.java:206)
> at
> org.apache.phoenix.cache.ServerCacheClient.addServerCache(ServerCacheClient.java:258)
> ... 23 more
> Caused by: java.lang.Exception: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.cache.ServerCacheClient$1.call(ServerCacheClient.java:226)
> at
> org.apache.phoenix.cache.ServerCacheClient$1.call(ServerCacheClient.java:185)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at
> org.apache.phoenix.job.JobManager$InstrumentedJobFutureTask.run(JobManager.java:183)
> ... 3 more
> Caused by: java.lang.IllegalAccessError:
> com/google/protobuf/HBaseZeroCopyByteString
> at
> org.apache.phoenix.cache.ServerCacheClient$1$1.call(ServerCacheClient.java:217)
> at
> org.apache.phoenix.cache.ServerCacheClient$1$1.call(ServerCacheClient.java:192)
> at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1763)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> ... 3 more
>
> Driver stacktrace:
> 17/09/14 11:56:47 ERROR TransportRequestHandler: Error while invoking
> RpcHandler#receive() for one-way message.
> org.apache.spark.SparkException: Could not find CoarseGrainedScheduler.
> at
> org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:154)
> at
> org.apache.spark.rpc.netty.Dispatcher.postOneWayMessage(Dispatcher.scala:134)
> at
> org.apache.spark.rpc.netty.NettyRpcHandler.receive(NettyRpcEnv.scala:570)
> at
> org.apache.spark.network.server.TransportRequestHandler.processOneWayMessage(TransportRequestHandler.java:180)
> at
> org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:109)
> at
> org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
> at
> org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
> at
> io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
> at
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
> at
> io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
> at
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
> at
> io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
> at
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
> at
> org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
> at
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
> at
> io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
> at
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
> at
> io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
> at
> io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
> at
> io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:652)
> at
> io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:575)
> at
> io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:489)
> at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:451)
> at
> io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
> at
> io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
> at java.lang.Thread.run(Thread.java:745)