Posted to user@flink.apache.org by Shankara <sh...@gmail.com> on 2017/11/07 14:34:35 UTC

Getting java.lang.ClassNotFoundException: for protobuf generated class

Hi,

    I am using Flink 2.1.0 and protobuf-java 2.6.1.
I am getting the exception below for a protobuf-generated class, even though I
have included the jar that contains that class.

Can you please help me check it?

org.apache.beam.sdk.util.UserCodeException: org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException: Could not forward element to next operator
	at org.apache.beam.sdk.util.UserCodeException.wrap(UserCodeException.java:36)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:65)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator.processElement(DoFnOperator.java:368)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:213)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:207)
	at org.apache.flink.streaming.api.operators.StreamFlatMap.processElement(StreamFlatMap.java:50)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$ManualWatermarkContext.processAndCollectWithTimestamp(StreamSourceContexts.java:309)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$WatermarkContext.collectWithTimestamp(StreamSourceContexts.java:408)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.emitElement(UnboundedSourceWrapper.java:329)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.run(UnboundedSourceWrapper.java:267)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:87)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:55)
	at org.apache.flink.streaming.runtime.tasks.SourceStreamTask.run(SourceStreamTask.java:95)
	at org.apache.flink.streaming.runtime.tasks.StoppableSourceStreamTask.run(StoppableSourceStreamTask.java:39)
	at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:263)
	at org.apache.flink.runtime.taskmanager.Task.run(Task.java:702)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException: Could not forward element to next operator
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:530)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator$MultiOutputOutputManagerFactory$1.output(DoFnOperator.java:730)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1.processElement(KafkaIO.java:690)
Caused by: org.apache.beam.sdk.util.UserCodeException: org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException: Could not forward element to next operator
	at org.apache.beam.sdk.util.UserCodeException.wrap(UserCodeException.java:36)
	at com.huawei.ccn.intelliom.ims.feature.extractor.KpiExtractor$FilterSamplesToAdv$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:65)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator.processElement(DoFnOperator.java:368)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator$MultiOutputOutputManagerFactory$1.output(DoFnOperator.java:730)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1.processElement(KafkaIO.java:690)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:65)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator.processElement(DoFnOperator.java:368)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:213)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:207)
	at org.apache.flink.streaming.api.operators.StreamFlatMap.processElement(StreamFlatMap.java:50)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$ManualWatermarkContext.processAndCollectWithTimestamp(StreamSourceContexts.java:309)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$WatermarkContext.collectWithTimestamp(StreamSourceContexts.java:408)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.emitElement(UnboundedSourceWrapper.java:329)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.run(UnboundedSourceWrapper.java:267)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:87)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:55)
	at org.apache.flink.streaming.runtime.tasks.SourceStreamTask.run(SourceStreamTask.java:95)
	at org.apache.flink.streaming.runtime.tasks.StoppableSourceStreamTask.run(StoppableSourceStreamTask.java:39)
	at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:263)
	at org.apache.flink.runtime.taskmanager.Task.run(Task.java:702)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException: Could not forward element to next operator
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:530)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator$MultiOutputOutputManagerFactory$1.output(DoFnOperator.java:730)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at com.huawei.ccn.intelliom.ims.feature.extractor.KpiExtractor$FilterSamplesToAdv.processElement(KpiExtractor.java:242)
Caused by: java.lang.RuntimeException: Unable to find proto buffer class
	at com.google.protobuf.GeneratedMessageLite$SerializedForm.readResolve(GeneratedMessageLite.java:775)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at java.io.ObjectStreamClass.invokeReadResolve(ObjectStreamClass.java:1148)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2036)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:422)
	at org.apache.beam.sdk.coders.SerializableCoder.decode(SerializableCoder.java:138)
	at org.apache.beam.sdk.coders.SerializableCoder.decode(SerializableCoder.java:48)
	at org.apache.beam.sdk.coders.Coder.decode(Coder.java:160)
	at org.apache.beam.sdk.coders.KvCoder.decode(KvCoder.java:85)
	at org.apache.beam.sdk.coders.KvCoder.decode(KvCoder.java:36)
	at org.apache.beam.sdk.util.WindowedValue$FullWindowedValueCoder.decode(WindowedValue.java:667)
	at org.apache.beam.sdk.util.WindowedValue$FullWindowedValueCoder.decode(WindowedValue.java:599)
	at org.apache.beam.sdk.util.CoderUtils.decodeFromSafeStream(CoderUtils.java:130)
	at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray(CoderUtils.java:113)
	at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray(CoderUtils.java:107)
	at org.apache.beam.sdk.util.CoderUtils.clone(CoderUtils.java:156)
	at org.apache.beam.runners.flink.translation.types.CoderTypeSerializer.copy(CoderTypeSerializer.java:64)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:526)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator$MultiOutputOutputManagerFactory$1.output(DoFnOperator.java:730)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at com.huawei.ccn.intelliom.ims.feature.extractor.KpiExtractor$FilterSamplesToAdv.processElement(KpiExtractor.java:242)
	at com.huawei.ccn.intelliom.ims.feature.extractor.KpiExtractor$FilterSamplesToAdv$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:65)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator.processElement(DoFnOperator.java:368)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator$MultiOutputOutputManagerFactory$1.output(DoFnOperator.java:730)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1.processElement(KafkaIO.java:690)
	at org.apache.beam.sdk.io.kafka.KafkaIO$TypedWithoutMetadata$1$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:65)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator.processElement(DoFnOperator.java:368)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:213)
	at org.apache.beam.runners.flink.FlinkStreamingTransformTranslators$StripIdsMap.flatMap(FlinkStreamingTransformTranslators.java:207)
	at org.apache.flink.streaming.api.operators.StreamFlatMap.processElement(StreamFlatMap.java:50)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:528)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:503)
	at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:483)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:891)
	at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:869)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$ManualWatermarkContext.processAndCollectWithTimestamp(StreamSourceContexts.java:309)
	at org.apache.flink.streaming.api.operators.StreamSourceContexts$WatermarkContext.collectWithTimestamp(StreamSourceContexts.java:408)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.emitElement(UnboundedSourceWrapper.java:329)
	at org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper.run(UnboundedSourceWrapper.java:267)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:87)
	at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:55)
	at org.apache.flink.streaming.runtime.tasks.SourceStreamTask.run(SourceStreamTask.java:95)
	at org.apache.flink.streaming.runtime.tasks.StoppableSourceStreamTask.run(StoppableSourceStreamTask.java:39)
	at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:263)
	at org.apache.flink.runtime.taskmanager.Task.run(Task.java:702)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: com.huawei.ccn.intelliom.ims.MeasurementTable$measurementTable
	at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:264)
	at com.google.protobuf.GeneratedMessageLite$SerializedForm.readResolve(GeneratedMessageLite.java:768)
	... 77 more



--
Sent from: http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/

Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Gordon Weakliem <gw...@sovrn.com>.
Jared has a good point; what is mvn dependency:tree showing?
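
For example, assuming a standard Maven build, the protobuf version that
actually ends up on the classpath can be listed with the dependency plugin
(the -Dincludes filter is optional and only narrows the output):

    # show every protobuf-java artifact in the resolved dependency tree
    mvn dependency:tree -Dincludes=com.google.protobuf:protobuf-java

    # or inspect the full tree
    mvn dependency:tree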

On Wed, Nov 22, 2017 at 7:54 AM, Jared Stehler <
jared.stehler@intellifylearning.com> wrote:

> Protobuf is notorious for throwing things like “class not found” when
> built and run with different versions of the library; I believe flink is
> using protobuf 2.5.0 and you mentioned using 2.6.1, which I think would be
> a possible cause of this issue.
>
> --
> Jared Stehler
> Chief Architect - Intellify Learning
> o: 617.701.6330 x703
>
>
>
> On Nov 22, 2017, at 4:25 AM, Nico Kruber <ni...@data-artisans.com> wrote:
>
> But wouldn't a failed dependency show another ClassNotFoundException?
>
> On Tuesday, 21 November 2017 20:31:58 CET Gordon Weakliem wrote:
>
> Isn't one cause for ClassNotFoundException that the class can't load due to
> failed dependencies or a failure in a static constructor?
>
> If jar -tf target/program.jar | grep MeasurementTable shows the class is
> present, are there other dependencies missing? You may need to add runtime
> dependencies into your pom or gradle.build file.
>
> On Tue, Nov 21, 2017 at 2:28 AM, Nico Kruber <ni...@data-artisans.com>
> wrote:
>
> Hi Shankara,
> sorry for the late response, but honestly, I cannot think of a reason that
> some of your program's classes (using only a single jar file) are found
> some
> others are not, except for the class not being in the jar.
>
> Or there's some class loader issue in the Flink Beam runner (which I find
> unlikely though) - maybe Aljoscha (cc'd) can elaborate a bit more on the
> Beam
> side and has some other idea.
>
>
> Nico
>
> On Tuesday, 14 November 2017 14:54:28 CET Shankara wrote:
>
> [quoted reply from Shankara (14 November) trimmed; it appears in full further down this page]
>
>
>
>


-- 
Gordon Weakliem | Sr. Software Engineer
O 303.493.5490
Boulder | NYC | London



Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Jared Stehler <ja...@intellifylearning.com>.
Protobuf is notorious for throwing things like “class not found” when built
and run with different versions of the library; I believe Flink is using
protobuf 2.5.0 and you mentioned using 2.6.1, which I think would be a
possible cause of this issue.
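
One way to test that hypothesis is to make sure a single protobuf version wins
inside the job jar, e.g. by pinning protobuf-java in dependencyManagement and
regenerating the classes with the matching protoc (2.5.0 below is only an
example of aligning with whatever the cluster actually ships; check that
version first):

    <dependencyManagement>
      <dependencies>
        <!-- force every transitive use of protobuf-java onto one version -->
        <dependency>
          <groupId>com.google.protobuf</groupId>
          <artifactId>protobuf-java</artifactId>
          <version>2.5.0</version>
        </dependency>
      </dependencies>
    </dependencyManagement>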

--
Jared Stehler
Chief Architect - Intellify Learning
o: 617.701.6330 x703



On Nov 22, 2017, at 4:25 AM, Nico Kruber <ni...@data-artisans.com> wrote:

But wouldn't a failed dependency show another ClassNotFoundException?

On Tuesday, 21 November 2017 20:31:58 CET Gordon Weakliem wrote:

Isn't one cause for ClassNotFoundException that the class can't load due to
failed dependencies or a failure in a static constructor?

If jar -tf target/program.jar | grep MeasurementTable shows the class is
present, are there other dependencies missing? You may need to add runtime
dependencies into your pom or gradle.build file.

On Tue, Nov 21, 2017 at 2:28 AM, Nico Kruber <ni...@data-artisans.com> wrote:

Hi Shankara,
sorry for the late response, but honestly, I cannot think of a reason that
some of your program's classes (using only a single jar file) are found
some
others are not, except for the class not being in the jar.

Or there's some class loader issue in the Flink Beam runner (which I find
unlikely though) - maybe Aljoscha (cc'd) can elaborate a bit more on the
Beam
side and has some other idea.


Nico

On Tuesday, 14 November 2017 14:54:28 CET Shankara wrote:

[quoted reply from Shankara (14 November) trimmed; it appears in full further down this page]

Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Nico Kruber <ni...@data-artisans.com>.
But wouldn't a failed dependency show another ClassNotFoundException?

On Tuesday, 21 November 2017 20:31:58 CET Gordon Weakliem wrote:
> Isn't one cause for ClassNotFoundException that the class can't load due to
> failed dependencies or a failure in a static constructor?
> 
> If jar -tf target/program.jar | grep MeasurementTable shows the class is
> present, are there other dependencies missing? You may need to add runtime
> dependencies into your pom or gradle.build file.
> 
> On Tue, Nov 21, 2017 at 2:28 AM, Nico Kruber <ni...@data-artisans.com> wrote:
> > Hi Shankara,
> > sorry for the late response, but honestly, I cannot think of a reason that
> > some of your program's classes (using only a single jar file) are found
> > some
> > others are not, except for the class not being in the jar.
> > 
> > Or there's some class loader issue in the Flink Beam runner (which I find
> > unlikely though) - maybe Aljoscha (cc'd) can elaborate a bit more on the
> > Beam
> > side and has some other idea.
> > 
> > 
> > Nico
> > 
> > On Tuesday, 14 November 2017 14:54:28 CET Shankara wrote:
> > > [quoted reply from Shankara (14 November) trimmed; it appears in full further down this page]


Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Gordon Weakliem <gw...@sovrn.com>.
Isn't one cause for ClassNotFoundException that the class can't load due to
failed dependencies or a failure in a static constructor?

If jar -tf target/program.jar | grep MeasurementTable shows the class is
present, are there other dependencies missing? You may need to add runtime
dependencies to your pom.xml or build.gradle file.
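
For example (target/program.jar is just a placeholder for the jar that is
actually submitted), both the generated class and the protobuf runtime it is
deserialized with can be checked from the shell:

    # is the generated inner class really in the submitted jar?
    jar -tf target/program.jar | grep 'MeasurementTable'

    # are the protobuf runtime classes bundled as well?
    jar -tf target/program.jar | grep 'com/google/protobuf/GeneratedMessageLite'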

On Tue, Nov 21, 2017 at 2:28 AM, Nico Kruber <ni...@data-artisans.com> wrote:

> Hi Shankara,
> sorry for the late response, but honestly, I cannot think of a reason that
> some of your program's classes (using only a single jar file) are found
> some
> others are not, except for the class not being in the jar.
>
> Or there's some class loader issue in the Flink Beam runner (which I find
> unlikely though) - maybe Aljoscha (cc'd) can elaborate a bit more on the
> Beam
> side and has some other idea.
>
>
> Nico
>
>
> On Tuesday, 14 November 2017 14:54:28 CET Shankara wrote:
> > [quoted reply from Shankara (14 November) trimmed; it appears in full further down this page]
>
>


-- 
Gordon Weakliem | Sr. Software Engineer
O 303.493.5490
Boulder | NYC | London



Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Nico Kruber <ni...@data-artisans.com>.
Hi Shankara,
sorry for the late response, but honestly, I cannot think of a reason why
some of your program's classes (using only a single jar file) are found while
others are not, except for the class not being in the jar.

Or there's some class loader issue in the Flink Beam runner (which I find
unlikely, though); maybe Aljoscha (cc'd) can elaborate a bit more on the Beam
side and has some other idea.
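
If a clash with protobuf classes already present on the cluster's classpath is
suspected, a rough check is to scan the jars the JobManager/TaskManager load
for protobuf classes (the $FLINK_HOME path below is an assumption for a
standalone setup; shaded copies will not match this pattern):

    # list cluster-side jars that bundle unshaded protobuf classes
    for j in "$FLINK_HOME"/lib/*.jar; do
      unzip -l "$j" | grep -q 'com/google/protobuf/' && echo "$j"
    done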


Nico


On Tuesday, 14 November 2017 14:54:28 CET Shankara wrote:
> Hi Nico,
> 
> 
> - how do you run the job?
> 
>        >> If we run same program in flink local then it works fine. For
> 
> flink local we used command line
>           mvn package exec:java
> -Dexec.mainClass=com.huawei.ccn.intelliom.ims.tmon.TMon
> -Dexec.args="--threshold=Measurment:0:4001:1:90:85:CPU
> --broker=192.168.56.1:9092" -Pflink-runner
> 
>        When we use flink cluster and submit jar using web UI then we are
> getting exception. like below
> <http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1
> 169/image953.png>
> 
>      Exception :
> 
> <http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1
> 169/image_%281%29.png>
> 
> - how do you add/include the jar with the missing class?
> 
>        >> We are generating the linked jar using the maven-jar-plugin. And
> 
> in the bundled jar all the protobuf generated class exist. There is no
> missing class.
> 
> - is that jar file part of your program's jar or separate?
> 
>        >> since we are using the jar-plugin, the protobuf jar is also part
> 
> of the generated jar.
> 
> - is the missing class, i.e. "com.huawei.ccn.intelliom.ims.MeasurementTable
> $measurementTable" (an inner class starting in lower-case?), really in the
> jar
> file? It might be a wrongly generated protobuf class ...
> 
>    >> sub Class is exit in Protobuf generated class. Please find the
> 
> attached class.
> 
> <http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1
> 169/Selection_028.png>
> 
> Thanks,
> Shankara
> 
> 
> 
> --
> Sent from:
> http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/


Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Shankara <sh...@gmail.com>.
Hi Nico,


- how do you run the job?  
       >> If we run the same program on a local Flink then it works fine. For
local Flink we used the command line:
          mvn package exec:java
-Dexec.mainClass=com.huawei.ccn.intelliom.ims.tmon.TMon
-Dexec.args="--threshold=Measurment:0:4001:1:90:85:CPU
--broker=192.168.56.1:9092" -Pflink-runner

       When we use a Flink cluster and submit the jar using the web UI, then we
get the exception below:
<http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1169/image953.png> 

     Exception : 
   
<http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1169/image_%281%29.png>     

- how do you add/include the jar with the missing class? 
       >> We are generating the linked jar using the maven-jar-plugin, and
all the protobuf-generated classes exist in the bundled jar. There is no
missing class. 

- is that jar file part of your program's jar or separate? 
       >> Since we are using the jar-plugin, the protobuf jar is also part
of the generated jar.
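
Note that the maven-jar-plugin by itself only packages the module's own
classes, so it is worth double-checking how the dependencies end up in the
bundle. A minimal sketch of an alternative, assuming the maven-shade-plugin is
an option (the plugin version and the relocation prefix are only examples),
which bundles protobuf-java and relocates it away from whatever protobuf the
cluster ships:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-shade-plugin</artifactId>
      <version>3.0.0</version>
      <executions>
        <execution>
          <phase>package</phase>
          <goals><goal>shade</goal></goals>
          <configuration>
            <relocations>
              <!-- keep the job's protobuf separate from the cluster's copy -->
              <relocation>
                <pattern>com.google.protobuf</pattern>
                <shadedPattern>shaded.com.google.protobuf</shadedPattern>
              </relocation>
            </relocations>
          </configuration>
        </execution>
      </executions>
    </plugin>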

- is the missing class, i.e. "com.huawei.ccn.intelliom.ims.MeasurementTable
$measurementTable" (an inner class starting in lower-case?), really in the jar
file? It might be a wrongly generated protobuf class ...
   >> The sub-class exists in the protobuf-generated class. Please find the
attached class:
<http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/file/t1169/Selection_028.png> 

Thanks,
Shankara



--
Sent from: http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/

Re: Getting java.lang.ClassNotFoundException: for protobuf generated class

Posted by Nico Kruber <ni...@data-artisans.com>.
Hi Shankara,
can you give us some more details, e.g.
- how do you run the job?
- how do you add/include the jar with the missing class?
- is that jar file part of your program's jar or separate?
- is the missing class, i.e. "com.huawei.ccn.intelliom.ims.MeasurementTable
$measurementTable" (an inner class starting in lower-case?), really in the jar 
file? It might be a wrongly generated protobuf class ...


Nico

On Tuesday, 7 November 2017 15:34:35 CET Shankara wrote:
> Hi,
> 
>     I am using flink 2.1.0 version and protobuf-java 2.6.1 version.
> I am getting below exception for protobuf generated class. I have included
> jar which is having that class.
> 
> Can you please help me to check it.
> 
> [full stack trace trimmed; see the original post at the top of this page]
> 
> 
> 
> --
> Sent from:
> http://apache-flink-user-mailing-list-archive.2336050.n4.nabble.com/