Posted to dev@mahout.apache.org by Andrew Musselman <an...@gmail.com> on 2015/02/24 18:50:57 UTC

Spark shell broken

I've just rebuilt mahout master and spark v1.2.1-rc2 and am getting this
error when I try out the spark-shell; am I missing something?

$ bin/mahout spark-shell
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.2.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
error:
     while compiling: <init>
        during phase: typer
     library version: version 2.10.4
    compiler version: version 2.10.0
  reconstructed args:

  last tree to typer: Literal(Constant(()))
              symbol: null
   symbol definition: null
                 tpe: Unit
       symbol owners:
      context owners: package <empty>

== Enclosing template or block ==

Block( // tree.tpe=Unit
  {}
  ()
)

== Expanded type of tree ==

TypeRef(TypeSymbol(final abstract class Unit extends AnyVal))

uncaught exception during compilation: java.lang.NoSuchMethodError

Failed to initialize compiler: NoSuchMethodError.
This is most often remedied by a full clean and recompile.
Otherwise, your classpath may continue bytecode compiled by
different and incompatible versions of scala.

java.lang.NoSuchMethodError:
scala.reflect.internal.TreeInfo.firstArgument(Lscala/reflect/internal/Trees$Tree;)Lscala/reflect/internal/Trees$Tree;
    at
scala.tools.nsc.typechecker.Typers$Typer.parentTypes(Typers.scala:1550)
    at
scala.tools.nsc.typechecker.Namers$Namer.templateSig(Namers.scala:861)
    at scala.tools.nsc.typechecker.Namers$Namer.classSig(Namers.scala:907)
    at scala.tools.nsc.typechecker.Namers$Namer.getSig$1(Namers.scala:1289)
    at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1347)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply$mcV$sp(Namers.scala:709)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer.scala$tools$nsc$typechecker$Namers$Namer$$logAndValidate(Namers.scala:1385)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:707)
    at
scala.tools.nsc.typechecker.Namers$$anon$1.completeImpl(Namers.scala:1496)
    at
scala.tools.nsc.typechecker.Namers$LockingTypeCompleter$class.complete(Namers.scala:1504)
    at
scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1494)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
    at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1374)
    at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5119)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
    at
scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
    at
scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedStat$1(Typers.scala:2770)
    at
scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
    at
scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
    at scala.collection.immutable.List.loop$1(List.scala:170)
    at scala.collection.immutable.List.mapConserve(List.scala:186)
    at
scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2870)
    at
scala.tools.nsc.typechecker.Typers$Typer.typedPackageDef$1(Typers.scala:5127)
    at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5404)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
    at
scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5509)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.apply(Analyzer.scala:98)
    at scala.tools.nsc.Global$GlobalPhase.applyPhase(Global.scala:461)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.run(Analyzer.scala:90)
    at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1574)
    at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1548)
    at scala.tools.nsc.Global$Run.compileSources(Global.scala:1544)
    at org.apache.spark.repl.SparkIMain.org
$apache$spark$repl$SparkIMain$$_initialize(SparkIMain.scala:187)
    at
org.apache.spark.repl.SparkIMain.initializeSynchronous(SparkIMain.scala:208)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:961)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at
scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
    at org.apache.mahout.sparkbindings.shell.Main$.main(Main.scala:39)
    at org.apache.mahout.sparkbindings.shell.Main.main(Main.scala)
15/02/24 09:42:52 WARN SparkILoop$SparkILoopInterpreter: Warning: compiler
accessed before init set up.  Assuming no postInit code.
error:
     while compiling: <console>
        during phase: typer
     library version: version 2.10.4
    compiler version: version 2.10.0
  reconstructed args:

  last tree to typer: Literal(Constant(()))
              symbol: null
   symbol definition: null
                 tpe: Unit
       symbol owners:
      context owners: package $line1

== Enclosing template or block ==

Block( // tree.tpe=Unit
  {}
  ()
)

== Expanded type of tree ==

TypeRef(TypeSymbol(final abstract class Unit extends AnyVal))

uncaught exception during compilation: java.lang.NoSuchMethodError
Exception in thread "main" java.lang.NoSuchMethodError:
scala.reflect.internal.TreeInfo.firstArgument(Lscala/reflect/internal/Trees$Tree;)Lscala/reflect/internal/Trees$Tree;
    at
scala.tools.nsc.typechecker.Typers$Typer.parentTypes(Typers.scala:1550)
    at
scala.tools.nsc.typechecker.Namers$Namer.templateSig(Namers.scala:861)
    at scala.tools.nsc.typechecker.Namers$Namer.getSig$1(Namers.scala:1300)
    at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1347)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply$mcV$sp(Namers.scala:709)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer.scala$tools$nsc$typechecker$Namers$Namer$$logAndValidate(Namers.scala:1385)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:708)
    at
scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:707)
    at
scala.tools.nsc.typechecker.Namers$$anon$1.completeImpl(Namers.scala:1496)
    at
scala.tools.nsc.typechecker.Namers$LockingTypeCompleter$class.complete(Namers.scala:1504)
    at
scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1494)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
    at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1374)
    at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5119)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
    at
scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
    at
scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedStat$1(Typers.scala:2770)
    at
scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
    at
scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
    at scala.collection.immutable.List.loop$1(List.scala:170)
    at scala.collection.immutable.List.mapConserve(List.scala:186)
    at
scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2870)
    at
scala.tools.nsc.typechecker.Typers$Typer.typedPackageDef$1(Typers.scala:5127)
    at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5404)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
    at
scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
    at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5509)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.apply(Analyzer.scala:98)
    at scala.tools.nsc.Global$GlobalPhase.applyPhase(Global.scala:461)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at
scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.run(Analyzer.scala:90)
    at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1574)
    at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1548)
    at scala.tools.nsc.Global$Run.compileSources(Global.scala:1544)
    at
org.apache.spark.repl.SparkIMain.compileSourcesKeepingRun(SparkIMain.scala:528)
    at
org.apache.spark.repl.SparkIMain$ReadEvalPrint.compileAndSaveRun(SparkIMain.scala:923)
    at
org.apache.spark.repl.SparkIMain$ReadEvalPrint.compile(SparkIMain.scala:879)
    at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:719)
    at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:762)
    at
org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
    at
org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:270)
    at org.apache.spark.repl.SparkIMain.quietBind(SparkIMain.scala:761)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$apply$mcZ$sp$2.apply$mcV$sp(SparkILoop.scala:935)
    at
org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:147)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:60)
    at
org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:106)
    at
org.apache.mahout.sparkbindings.shell.MahoutSparkILoop.postInitialization(MahoutSparkILoop.scala:24)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:962)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at
scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
    at org.apache.mahout.sparkbindings.shell.Main$.main(Main.scala:39)
    at org.apache.mahout.sparkbindings.shell.Main.main(Main.scala)
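
The compiler banner above reports library version 2.10.4 against compiler
version 2.10.0, and the REPL's own hint about "different and incompatible
versions of scala" points the same way, so the NoSuchMethodError on
scala.reflect.internal.TreeInfo looks like Scala binary-version skew on the
shell's classpath. A rough way to see which Scala jars each build drags in
(a sketch only; the paths follow the report above and will differ locally):

  # list scala-library / scala-compiler jars under both source trees
  find ~/mahout ~/spark \( -name 'scala-compiler*.jar' -o -name 'scala-library*.jar' \) 2>/dev/null

  # ask Maven which org.scala-lang artifacts the Mahout build resolves
  cd ~/mahout && mvn dependency:tree -Dincludes=org.scala-lang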

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
PS: Normally I would just reset the head to ^1, but that would require a
forced rewrite, and ASF git doesn't allow this (and for a good reason,
really). So a revert on master would be necessary.
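
Concretely, the two options differ like this (a sketch; the hash is a
placeholder, not the actual 1.2.1 commit):

  # reset + forced push: rewrites history, which ASF git rejects
  git reset --hard HEAD^1
  git push --force origin master

  # revert: adds a new commit that undoes the change, no history rewrite
  git revert <sha-of-the-1.2.1-commit>
  git push origin master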

On Tue, Feb 24, 2015 at 10:51 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> I mean roll back #74 and apply it to a branch spark-1.2.
>
> (I assume minor Spark releases are fairly irrelevant for this type of
> thing, so that's why I am suggesting the name "spark-1.2" for the
> feature branch. Or `spark-1.2.x` if you prefer.)
>
> On Tue, Feb 24, 2015 at 10:48 AM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
>> Roll back meaning just the entry in the pom?
>>
>> On Tue, Feb 24, 2015 at 10:31 AM, Pat Ferrel <pa...@occamsmachete.com>
>> wrote:
>>
>> > 1.2? I thought the previous version was Spark 1.1.0?
>> >
>> > I need 1.2 so I’m up for trying to fix this. It was a contribution;
>> > maybe the originator has a clue.
>> >
>> > BTW can’t run spark-itemsimilarity on the cluster either though all unit
>> > tests pass and the cluster seems to be working with their shell and
>> > examples.
>> >
>> > I get:
>> >
>> > Exception in thread "main" com.typesafe.config.ConfigException$Missing:
>> > No configuration setting found for key 'akka.event-handlers'
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
>> >         at
>> > com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
>> >         at
>> >
>> com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
>> >         at
>> >
>> com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
>> >         at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
>> >         at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
>> >         ...
>> >
>> > On Feb 24, 2015, at 10:22 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> wrote:
>> >
>> > As a remedy, I'd suggest branching out the spark 1.2 work and rolling
>> > back the 1.2.1 commit on master until the 1.2 branch is fixed.
>> >
>> > On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> > wrote:
>> >
>> > > oops.
>> > >
>> > > tests don't test shell startup.
>> > >
>> > > apparently stuff got out of sync with 1.2
>> > >
>> > > On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
>> > > wrote:
>> > >
>> > >> Me too and I built with 1.2.1
>> > >>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
I mean roll back #74 and apply it to a branch spark-1.2.

(I assume minor Spark releases are fairly irrelevant for this type of
thing, so that's why I am suggesting the name "spark-1.2" for the
feature branch. Or `spark-1.2.x` if you prefer.)
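
A rough sketch of that workflow (the hash is a placeholder; if #74 landed
as a merge commit, the revert needs -m 1):

  # from master, with the Spark 1.2.1 change at the tip
  git checkout -b spark-1.2        # feature branch keeps the Spark 1.2 work
  git push origin spark-1.2
  git checkout master
  git revert <sha-of-#74>          # roll the change back on master only
  git push origin master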

On Tue, Feb 24, 2015 at 10:48 AM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> Roll back meaning just the entry in the pom?
>

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
I'll try that out, make the branch, and push the last commit to master.

On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

> to be safe I’d “git reset --hard xyz” to the commit previous to the 1.2.1
>
> I merged a big commit with this and upgraded my cluster to 1.2.1 so will
> stick with this for a bit.
>
> If anyone has a clue please speak up. It seems related to starting a
> context. The error in spark-itemsimilarity is much simpler than the shell
> one.
>
> 15/02/24 10:17:57 INFO spark.SecurityManager: Changing view acls to: pat,
> 15/02/24 10:17:57 INFO spark.SecurityManager: Changing modify acls to: pat,
> 15/02/24 10:17:57 INFO spark.SecurityManager: SecurityManager:
> authentication disabled; ui acls disabled; users with view permissions:
> Set(pat, ); users with modify permissions: Set(pat, )
> Exception in thread "main" com.typesafe.config.ConfigException$Missing: No
> configuration setting found for key 'akka.event-handlers'
>     at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
>     at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
>     at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
> at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
>     at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
>     at com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
>     at
> com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
>     at
> com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
>     at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
>     at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
>     at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
>     at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
>     at
> org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
>     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
>     at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
> at
> org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
>     at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
>     at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
>     at
> org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
>     at org.apache.spark.SparkEnv$.create(SparkEnv.scala:153)
>     at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
>     at
> org.apache.mahout.sparkbindings.package$.mahoutSparkContext(package.scala:95)
>     at
> org.apache.mahout.drivers.MahoutSparkDriver.start(MahoutSparkDriver.scala:81)
>     at
> org.apache.mahout.drivers.ItemSimilarityDriver$.start(ItemSimilarityDriver.scala:118)
>
>
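
On the spark-itemsimilarity failure quoted above: purely a guess, but a
missing standard key such as 'akka.event-handlers' while ActorSystem
settings are being read is the kind of error that tends to show up when
mixed akka/typesafe-config versions end up on the classpath, or when a job
jar's assembly drops akka's reference.conf. A cheap first check (paths
again follow the earlier report and will differ locally):

  find ~/mahout ~/spark \( -name 'akka-actor*.jar' -o -name 'config-*.jar' \) 2>/dev/null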
> On Feb 24, 2015, at 10:48 AM, Andrew Musselman <an...@gmail.com>
> wrote:
>
> Roll back meaning just the entry in the pom?
>
> On Tue, Feb 24, 2015 at 10:31 AM, Pat Ferrel <pa...@occamsmachete.com>
> wrote:
>
> > 1.2? I thought the previous version was Spark 1.1.0?
> >
> > I need 1.2  so I’m up for trying to fix this. It was a contribution,
> maybe
> > the originator has a clue.
> >
> > BTW can’t run spark-itemsimilarity on the cluster either though all unit
> > tests pass and the cluster seems to be working with their shell and
> > examples.
> >
> > I get:
> >
> > Exception in thread "main" com.typesafe.config.ConfigException$Missing:
> No
> > configuration setting found for key 'akka.event-handlers'
> >        at
> > com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
> >        at
> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
> >        at
> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
> >        at
> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
> >        at
> > com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
> >        at
> > com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
> >        at
> >
> com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
> >        at
> >
> com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
> >        at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
> >        at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
> >        ...
> >
> > On Feb 24, 2015, at 10:22 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
> >
> > As a remedy, i'd suggest to branch out spark 1.2 work and rollback 1.2.1
> > commit on master until 1.2 branch is fixed.
> >
> > On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > wrote:
> >
> >> oops.
> >>
> >> tests dont test shell startup.
> >>
> >> apparently stuff got out of sync with 1.2
> >>
> >> On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
> >> wrote:
> >>
> >>> Me too and I built with 1.2.1
> >>>
> >>> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <
> > andrew.musselman@gmail.com>
> >>> wrote:
> >>>
> >>> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting
> this
> >>> error when I try out the spark-shell; am I missing something?
> >>>
> >>> $ bin/mahout spark-shell
> >>> SLF4J: Class path contains multiple SLF4J bindings.
> >>> SLF4J: Found binding in
> >>>
> >>>
> >
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >>> SLF4J: Found binding in
> >>>
> >>>
> >
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >>> SLF4J: Found binding in
> >>>
> >>>
> >
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.2.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> >>> explanation.
> >>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> >>> error:
> >>>   while compiling: <init>
> >>>      during phase: typer
> >>>   library version: version 2.10.4
> >>>  compiler version: version 2.10.0
> >>> reconstructed args:
> >>>
> >>> last tree to typer: Literal(Constant(()))
> >>>            symbol: null
> >>> [rest of the compiler error and NoSuchMethodError stack trace from the original message trimmed; it is quoted in full at the start of the thread]
> >>> 15/02/24 09:42:52 WARN SparkILoop$SparkILoopInterpreter: Warning: compiler accessed before init set up.  Assuming no postInit code.
> >>> error:
> >>>   while compiling: <console>
> >>>      during phase: typer
> >>>   library version: version 2.10.4
> >>>  compiler version: version 2.10.0
> >>> reconstructed args:
> >>>
> >>> last tree to typer: Literal(Constant(()))
> >>>            symbol: null
> >>> symbol definition: null
> >>>               tpe: Unit
> >>>     symbol owners:
> >>>    context owners: package $line1
> >>>
> >>> == Enclosing template or block ==
> >>>
> >>> Block( // tree.tpe=Unit
> >>> {}
> >>> ()
> >>> )
> >>>
> >>> == Expanded type of tree ==
> >>>
> >>> TypeRef(TypeSymbol(final abstract class Unit extends AnyVal))
> >>>
> >>> uncaught exception during compilation: java.lang.NoSuchMethodError
> >>> Exception in thread "main" java.lang.NoSuchMethodError:
> >>> scala.reflect.internal.TreeInfo.firstArgument(Lscala/reflect/internal/Trees$Tree;)Lscala/reflect/internal/Trees$Tree;
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.parentTypes(Typers.scala:1550)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer.templateSig(Namers.scala:861)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer.getSig$1(Namers.scala:1300)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1347)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply$mcV$sp(Namers.scala:709)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer.scala$tools$nsc$typechecker$Namers$Namer$$logAndValidate(Namers.scala:1385)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:708)
> >>>  at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:707)
> >>>  at scala.tools.nsc.typechecker.Namers$$anon$1.completeImpl(Namers.scala:1496)
> >>>  at scala.tools.nsc.typechecker.Namers$LockingTypeCompleter$class.complete(Namers.scala:1504)
> >>>  at scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1494)
> >>>  at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
> >>>  at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1374)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5119)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
> >>>  at scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedStat$1(Typers.scala:2770)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$55.apply(Typers.scala:2870)
> >>>  at scala.collection.immutable.List.loop$1(List.scala:170)
> >>>  at scala.collection.immutable.List.mapConserve(List.scala:186)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2870)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typedPackageDef$1(Typers.scala:5127)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5404)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5458)
> >>>  at scala.tools.nsc.interpreter.ReplGlobal$$anon$1$$anon$2.typed(ReplGlobal.scala:29)
> >>>  at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5509)
> >>>  at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.apply(Analyzer.scala:98)
> >>>  at scala.tools.nsc.Global$GlobalPhase.applyPhase(Global.scala:461)
> >>>  at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
> >>>  at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:90)
> >>>  at scala.collection.Iterator$class.foreach(Iterator.scala:727)
> >>>  at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
> >>>  at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.run(Analyzer.scala:90)
> >>>  at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1574)
> >>>  at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1548)
> >>>  at scala.tools.nsc.Global$Run.compileSources(Global.scala:1544)
> >>>  at org.apache.spark.repl.SparkIMain.compileSourcesKeepingRun(SparkIMain.scala:528)
> >>>  at org.apache.spark.repl.SparkIMain$ReadEvalPrint.compileAndSaveRun(SparkIMain.scala:923)
> >>>  at org.apache.spark.repl.SparkIMain$ReadEvalPrint.compile(SparkIMain.scala:879)
> >>>  at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:719)
> >>>  at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:762)
> >>>  at org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
> >>>  at org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
> >>>  at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:270)
> >>>  at org.apache.spark.repl.SparkIMain.quietBind(SparkIMain.scala:761)
> >>>  at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$apply$mcZ$sp$2.apply$mcV$sp(SparkILoop.scala:935)
> >>>  at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:147)
> >>>  at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:60)
> >>>  at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:106)
> >>>  at org.apache.mahout.sparkbindings.shell.MahoutSparkILoop.postInitialization(MahoutSparkILoop.scala:24)
> >>>  at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:962)
> >>>  at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
> >>>  at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
> >>>  at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
> >>>  at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
> >>>  at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
> >>>  at org.apache.mahout.sparkbindings.shell.Main$.main(Main.scala:39)
> >>>  at org.apache.mahout.sparkbindings.shell.Main.main(Main.scala)
> >>>
> >>>
> >>
> >
> >
>
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
ASF also mirrors dropping branches; I remember doing that too. But it won't
allow history rewrites.

On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> What exactly did you try to do?
>
> Just resetting HEAD will not work on a remote branch -- you need to
> force-push that (with +) since it is a history-rewriting push, but ASF git
> does not allow that.
>
> ASF will mirror ALL branches AFAIK. I think I've done it before, so if you
> create a new one it should (eventually) get there.
>
> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
>> see it there yet.
>>
>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> wrote:
>>
>> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
>> > wrote:
>> >
>> > > To be safe I’d “git reset --hard xyz” to the commit previous to the 1.2.1
>> > >
>> >
>> > As I just explained, such resets are not possible with ASF git. Reverting
>> > is the only option.
>> >
>> > -d
>> >
>>
>
>

Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
So I guess I’ll take 1.2.1 out of the pom but leave all the other prep stuff from the contribution, so if anyone wanted to they could just change the Spark version and it would work. That is, if they knew the -D:k=v magic.
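
For reference, a minimal sketch of the same workaround applied programmatically instead of through the driver's -D:k=v flag. The master URL and app name below are placeholders, and the jar path is the dependency-reduced jar quoted further down; it has to exist at that same path on every worker for spark.executor.extraClassPath to help:

    import org.apache.spark.{SparkConf, SparkContext}

    // Sketch only: master URL and app name are placeholders, and the jar must
    // already sit at this path on every worker node.
    val conf = new SparkConf()
      .setMaster("spark://localhost:7077")
      .setAppName("guava-classpath-workaround")
      .set("spark.executor.extraClassPath",
        "/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar")

    val sc = new SparkContext(conf)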


On Feb 27, 2015, at 2:34 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:

Following CDH releases perhaps helps a bit; they tend to skip buggy "releases".

On Fri, Feb 27, 2015 at 2:07 PM, Pat Ferrel <pa...@occamsmachete.com> wrote:
> The deserialization thing is a Spark bug. The workaround requires that you put a key/value in the SparkConf to point to a jar on _all_ workers that has Guava in it.
> 
> Spark 1.2.1 doesn’t seem to be a good thing to support.
> 
> The drivers allow this on the command line with -D:spark.executor.extraClassPath="/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar”
> 
> I’ll check if this is fixed for 1.3, which is in RC now.
> 
> FWIW these guys seem to create a new release every time they need to fix a bug. It’s a coin flip which release will be stable.
> 
> 
> On Feb 26, 2015, at 9:29 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:
> 
> Great, thanks. So I bet the only remaining problem is getting the right Guava into the Spark client and workers.
> 
> Spark shades Guava, so it is probably not causing the problems since it’s never supposed to be in scope. When I use the same version number I get errors anyway, so it must be in the Mahout artifacts, probably the assembly jar created in mahout/spark with transitive dependencies.
> 
> I was planning to wait on Dmitriy’s refactoring before messing with that, but in a separate branch I did a drastically pared-down version that might be better to use.
> 
> On Feb 26, 2015, at 8:58 AM, Andrew Palumbo <ap...@outlook.com> wrote:
> 
> 
> Just tested spark-testnb with the -ma option set, with no problems. Spark naive Bayes is pretty bland and doesn't use any classes other than core Scala, Mahout and Spark.
> 
> On 02/26/2015 11:36 AM, Pat Ferrel wrote:
>> A difference in how classes are found comes about when running a standalone cluster. You start Spark with spark/sbin/start-all.sh with only localhost in slaves, then run the driver “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
>> 
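
A minimal sketch of the class-distribution side of this, not the Mahout driver's actual code: against a standalone master the driver either ships application jars with setJars()/spark.jars or relies on paths that already exist on every worker (as with the extraClassPath workaround above). The master URL is the one from Pat's example; the jar path and app name are placeholders:

    import org.apache.spark.{SparkConf, SparkContext}

    // Sketch only: setJars() ships these jars to the executors of the
    // standalone cluster; the path and app name are placeholders.
    val conf = new SparkConf()
      .setMaster("spark://Maclaurin.local:7077")
      .setAppName("nbtest")
      .setJars(Seq("/path/to/mahout-spark_2.10-1.0-SNAPSHOT-job.jar"))

    val sc = new SparkContext(conf)
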
>> re: poms, of course—stupid question.
>> 
>> On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:
>> 
>> The NB Drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since but I did test a script that runs NB from the spark-shell with no problems.
>> 
>> I put those in math-scala since all of our scala modules depend on math-scala.
>> 
>> On 02/26/2015 11:01 AM, Pat Ferrel wrote:
>>> Should these be in all Scala poms or just math-scala?
>>> 
>>> Is the NB stuff working with the 1.2 branch?
>>> 
>>> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>>> 
>>> Sorry -- I left out the scala-compiler artifact (at the top); it should read:
>>> 
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-compiler</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-reflect</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-library</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-actors</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scalap</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> 
>>> 
>>> 
>>> Adding these has $mahout spark-shell working for me on Spark 1.2.1.
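
A quick way to confirm the compiler/library mismatch from the original failure ("library version: version 2.10.4" vs "compiler version: version 2.10.0") is gone once these dependencies are in place. A sketch, assuming scala-compiler is on the shell's classpath:

    // Both lines should now report the same 2.10.x version.
    println("scala-library:  " + scala.util.Properties.versionString)
    println("scala-compiler: " + scala.tools.nsc.Properties.versionString)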
>>> 
>>> 
>>> 
>>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>>> 
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-reflect</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-library</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-actors</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scalap</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> 
>>>> 
>>>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
>>>> 
>>>> 
>>>> 
>>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>>> Except after some time it blew up:
>>>>> 
>>>>> $ bin/mahout spark-shell
>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>> explanation.
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>> 
>>>>>                        _                 _
>>>>>        _ __ ___   __ _| |__   ___  _   _| |_
>>>>>       | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>       | | | | | | (_| | | | | (_) | |_| | |_
>>>>>       |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>> 
>>>>> 
>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>> 1.7.0_03)
>>>>> Type in expressions to have them evaluated.
>>>>> Type :help for more information.
>>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>> another address
>>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>> library for your platform... using builtin-java classes where applicable
>>>>> Created spark context..
>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>> serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> [the same "local class incompatible" InvalidClassException and stack trace repeat roughly every 30-90 seconds from 12:24:50 through 12:35:07, and TaskSchedulerImpl logs "Lost an executor N (already removed): remote Akka client disassociated" for executors 0 through 6; most of the duplicate traces are trimmed here]
>>>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>   at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>   at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>   at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>   at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>   at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>   at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>   at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>   at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>   at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>   at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>   at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>   at scala.util.Try$.apply(Try.scala:161)
>>>>>   at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>   at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>   at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>   at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>   at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>   at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>   at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>   at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>   at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>   at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>   at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>   at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>>> killed. Reason: Master removed our application: FAILED
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>>> cluster scheduler: Master removed our application: FAILED
>>>>> 
>>>>> 
>>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>> 
>>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>> 
>>>>>>> ok spark 1.2 is mirrored now.
>>>>>>> and master should be also ok (back to 1.1)
>>>>>>> 
>>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>> 
>>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>> 
>>>>>>>> I got email confirmation like so:
>>>>>>>> Repository: mahout
>>>>>>>> Updated Branches:
>>>>>>>> refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
>>>>>>>>> should be there eventually, give it a bit of time.
>>>>>>>>> 
>>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>>>> wrote:
>>>>>>>>> 
>>>>>>>>>> what exactly did you try to do?
>>>>>>>>>> 
>>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>>> force-push that (with +) since it is a history-rewriting push, but ASF
>>>>>>>>>> git does not allow that.
>>>>>>>>>> 
>>>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before, so if
>>>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>>> 
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>> 
>>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>>>> don't see it there yet.
>>>>>>>>>>> 
>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>>>>> dlieu.7@gmail.com
>>>>>>>>>>> wrote:
>>>>>>>>>>> 
>>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>>>>>>> pat@occamsmachete.com
>>>>>>>>>>>> wrote:
>>>>>>>>>>>> 
>>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>>>>> 1.2.1
>>>>>>>>>>>> As I just explained, such resets are not possible with ASF git.
>>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>>> 
>>>>>>>>>>>> -d
>>>>>>>>>>>> 
>> 
> 
> 
> 


Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
Following CDH releases perhaps helps a bit; they tend to skip buggy "releases".

On Fri, Feb 27, 2015 at 2:07 PM, Pat Ferrel <pa...@occamsmachete.com> wrote:
> The deserialization thing is a Spark bug. The workaround requires that you put a key/value in the SparkConf to point to a jar on _all_ workers that has Guava in it.
>
> Spark 1.2.1 doesn’t seem to be a good thing to support.
>
> The drivers allow this on the command line with -D:spark.executor.extraClassPath="/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar"
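
For reference, the same workaround can be sketched programmatically in a SparkConf before the context is created. This is only an illustrative sketch, not something taken from the thread: the jar path is the one from the command line above, while the app name and master URL are placeholders.

  import org.apache.spark.{SparkConf, SparkContext}

  // Sketch only: point every executor's extra classpath at a jar that sits at
  // the same path on all workers and bundles the Guava classes it needs.
  val conf = new SparkConf()
    .setAppName("mahout-spark-shell")      // placeholder
    .setMaster("spark://localhost:7077")   // placeholder
    .set("spark.executor.extraClassPath",
      "/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar")

  val sc = new SparkContext(conf)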
>
> I’ll check if this is fixed for 1.3, which is in RC now.
>
> FWIW these guys seem to create a new release every time they need to fix a bug. It’s a coin flip which release will be stable.
>
>
> On Feb 26, 2015, at 9:29 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:
>
> Great, thanks. So I bet the only remaining problem is getting the right Guava into the Spark client and workers.
>
> Spark shades Guava, so it is probably not causing the problems since it’s never supposed to be in scope. When I use the same version number I get errors anyway, so it must be in the Mahout artifacts, probably the assembly jar created in mahout/spark with its transitive dependencies.
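
As a quick sanity check on where Guava actually comes from, a generic JVM trick (not Mahout-specific, shown here only as a hedged sketch) is to ask the running shell which jar supplied an unshaded Guava class:

  // From the Scala shell: print the code source of an unshaded Guava class.
  // Throws ClassNotFoundException if no unshaded Guava is on the classpath;
  // otherwise the printed path shows which artifact won.
  val src = Class.forName("com.google.common.base.Preconditions")
    .getProtectionDomain.getCodeSource
  println(Option(src).map(_.getLocation.toString).getOrElse("unknown code source"))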
>
> I was planning to wait on Dmitriy’s refactoring before messing with that but in a separate branch I did a drastically pared down version that might be better to use.
>
> On Feb 26, 2015, at 8:58 AM, Andrew Palumbo <ap...@outlook.com> wrote:
>
>
> Just tested spark-testnb with the -ma option set, with no problems. Spark naive Bayes is pretty bland and doesn't use any classes other than core Scala, Mahout, and Spark.
>
> On 02/26/2015 11:36 AM, Pat Ferrel wrote:
>> A difference in how classes are found comes about when running a standalone cluster. You start Spark with spark/sbin/start-all.sh, with only localhost in slaves. Then run the driver “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
>>
>> re: poms, of course—stupid question.
>>
>> On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:
>>
>> The NB Drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since but I did test a script that runs NB from the spark-shell with no problems.
>>
>> I put those in math-scala since all of our scala modules depend on math-scala.
>>
>> On 02/26/2015 11:01 AM, Pat Ferrel wrote:
>>> should these be in all scala poms or just math-scala?
>>>
>>> Is the NB stuff working with the 1.2 branch?
>>>
>>> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>>>
>>> Sorry - I left out the scala-compiler artifact (at the top); it should read:
>>>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-compiler</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-reflect</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-library</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-actors</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scalap</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>>
>>>
>>>
>>> Adding these has $mahout spark-shell working for me on Spark 1.2.1.
>>>
>>>
>>>
>>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>>>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-reflect</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-library</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scala-actors</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>   <groupId>org.scala-lang</groupId>
>>>>   <artifactId>scalap</artifactId>
>>>>   <version>${scala.version}</version>
>>>> </dependency>
>>>>
>>>>
>>>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
>>>>
>>>>
>>>>
>>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>>> Except after some time it blew up:
>>>>>
>>>>> $ bin/mahout spark-shell
>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>> explanation.
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>
>>>>>                         _                 _
>>>>>         _ __ ___   __ _| |__   ___  _   _| |_
>>>>>        | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>        | | | | | | (_| | | | | (_) | |_| | |_
>>>>>        |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>
>>>>>
>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>> 1.7.0_03)
>>>>> Type in expressions to have them evaluated.
>>>>> Type :help for more information.
>>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>> another address
>>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>> library for your platform... using builtin-java classes where applicable
>>>>> Created spark context..
>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>> serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>    at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at
>>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>    at
>>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>    at
>>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>    at
>>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>    at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>    at
>>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>    at
>>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>    at
>>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>    at
>>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>>> killed. Reason: Master removed our application: FAILED
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>>> cluster scheduler: Master removed our application: FAILED
>>>>>
>>>>>
>>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> ok spark 1.2 is mirrored now.
>>>>>>> and master should be also ok (back to 1.1)
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>>
>>>>>>>> I got email confirmation like so:
>>>>>>>> Repository: mahout
>>>>>>>> Updated Branches:
>>>>>>>>  refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
>>>>>>>>> should be there eventually, give it a bit of time.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>>> what exactly did you try to do?
>>>>>>>>>>
>>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but asf
>>>>>>>>>> git does not allow that.
>>>>>>>>>>
>>>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before. so if
>>>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>>
>>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
>>>>>>>>>>> see it there yet.
>>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>>> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>>>> wrote:
>>>>>>>>>>>>
>>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>>>>> 1.2.1
>>>>>>>>>>>> As I just explained, such resets are not possible with ASF git.
>>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>>>
>>>>>>>>>>>> -d
>>>>>>>>>>>>
>>
>
>
>

Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
The deserialization thing is a Spark bug. The workaround requires that you put a key/value in the SparkConf pointing to a jar, present on _all_ workers, that has Guava in it.

Spark 1.2.1 doesn’t seem to be a good thing to support. 

The drivers allow this on the command line with -D:spark.executor.extraClassPath="/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar"
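
The same key can also be set programmatically on a SparkConf before the context is created. A minimal sketch (the jar path and master URL are just the examples above, and the jar is assumed to already exist at that path on every worker):

import org.apache.spark.{SparkConf, SparkContext}

// Sketch only: prepend a jar that already contains Guava to every executor's classpath.
val conf = new SparkConf()
  .setAppName("mahout-spark-shell")
  .setMaster("spark://Maclaurin.local:7077")
  .set("spark.executor.extraClassPath",
    "/Users/pat/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-dependency-reduced.jar")
val sc = new SparkContext(conf)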

I’ll check if this is fixed for 1.3, which is in RC now.

FWIW these guys seem to create a new release every time they need to fix a bug. It’s a coin flip which release will be stable.


On Feb 26, 2015, at 9:29 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

Great, thanks. So I bet the only remaining problem is getting the right Guava into the Spark client and workers.

Spark shades Guava, so it is probably not causing the problems since it's never supposed to be in scope. When I use the same version number I get errors anyway, so it must be coming from the Mahout artifacts, probably the assembly jar created in mahout/spark with transitive dependencies.
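
One quick way to check which Guava actually wins on the driver and on the executors is a small probe along these lines (a sketch, assuming sc is a handle to the SparkContext; com.google.common.base.Preconditions is just a convenient Guava class to look up):

// Where does Guava come from on the driver?
println(classOf[com.google.common.base.Preconditions].getProtectionDomain.getCodeSource)

// And on the executors? Run a trivial job and report the jar location back.
sc.parallelize(0 until 1)
  .map(_ => String.valueOf(
    classOf[com.google.common.base.Preconditions].getProtectionDomain.getCodeSource))
  .collect()
  .foreach(println)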

I was planning to wait on Dmitriy’s refactoring before messing with that, but in a separate branch I did a drastically pared-down version that might be better to use.

On Feb 26, 2015, at 8:58 AM, Andrew Palumbo <ap...@outlook.com> wrote:


Just tested spark-testnb with the -ma option set, with no problems. Spark naive Bayes is pretty bland and doesn't use any classes other than core Scala, Mahout and Spark.

On 02/26/2015 11:36 AM, Pat Ferrel wrote:
> A difference in how classes are found comes about when running a standalone cluster. You start Spark with spark/sbin/start-all.sh with only localhost in slaves, then run the driver with “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
> 
> re: poms, of course—stupid question.
> 
> On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:
> 
> The NB Drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since, but I did test a script that runs NB from the spark-shell with no problems.
> 
> I put those in math-scala since all of our scala modules depend on math-scala.
> 
> On 02/26/2015 11:01 AM, Pat Ferrel wrote:
>> Should these be in all Scala poms, or just math-scala?
>> 
>> Is the NB stuff working with the 1.2 branch?
>> 
>> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>> 
>> Sorry - I left out the scala-compiler artifact (at the top); it should read:
>> 
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-compiler</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-reflect</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-library</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-actors</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scalap</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> 
>> 
>> 
>> Adding these gets "$ mahout spark-shell" working for me on Spark 1.2.1.
>> 
>> 
>> 
>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>> 
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-reflect</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-library</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-actors</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scalap</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> 
>>> 
>>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
>>> 
>>> 
>>> 
>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>> Except after some time it blew up:
>>>> 
>>>> $ bin/mahout spark-shell
>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>> explanation.
>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>> 
>>>>                         _                 _
>>>>         _ __ ___   __ _| |__   ___  _   _| |_
>>>>        | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>        | | | | | | (_| | | | | (_) | |_| | |_
>>>>        |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>> 
>>>> 
>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>> 1.7.0_03)
>>>> Type in expressions to have them evaluated.
>>>> Type :help for more information.
>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>> another address
>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>> library for your platform... using builtin-java classes where applicable
>>>> Created spark context..
>>>> Mahout distributed context is available as "implicit val sdc".
>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>> serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> [snip: the same ERROR Remoting / java.io.InvalidClassException stack trace repeated]
>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>    at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>    at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>    at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>    at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>    at scala.util.Try$.apply(Try.scala:161)
>>>>    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>    at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>    at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>    at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>    at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>    at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>> killed. Reason: Master removed our application: FAILED
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>> cluster scheduler: Master removed our application: FAILED
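The wall of repeated InvalidClassException errors above is plain Java serialization telling us that the driver and the cluster are loading two different builds of Spark's BlockManagerId: the driver's classpath carries the spark-assembly-1.1.1 jar visible in the SLF4J banner further down the thread, while the standalone master and workers it talks to were presumably started from a different Spark build. A quick way to confirm which serialVersionUID each side actually ships, only a sketch, is the JDK's serialver tool pointed at the two assembly jars (the first path is taken from this log; the second is a placeholder for whatever the workers were started with):

# compare the UID in the driver's assembly with the one the workers load
$ serialver -classpath /home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar \
    org.apache.spark.storage.BlockManagerId
$ serialver -classpath /path/to/workers/spark-assembly-<their-version>.jar \
    org.apache.spark.storage.BlockManagerId

If the two numbers disagree, as 1677335532749418220 and -7366074099953117729 do in the log, the remedy is simply to run the same Spark build on both sides.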
>>>> 
>>>> 
>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>> 
>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>> 
>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>> 
>>>>>> ok spark 1.2 is mirrored now.
>>>>>> and master should be also ok (back to 1.1)
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>> 
>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>> 
>>>>>>> I got email confirmation like so:
>>>>>>> Repository: mahout
>>>>>>> Updated Branches:
>>>>>>>  refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>> 
>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>>> wrote:
>>>>>>> 
>>>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>>>>>> it
>>>>>>>> should be there eventually, give it a bit of time.
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>>> what exactly did you try to do?
>>>>>>>>> 
>>>>>>>>> just resetting HEAD will not work on remote branch -- you need
>>>>>>> force-sync
>>>>>>>>> that (with +) since it is a history-rewriting push, but asf git does
>>>>>>> not
>>>>>>>>> allow that.
>>>>>>>>> 
>>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before. So
>>>>>> if
>>>>>>>> you
>>>>>>>>> create a new one it should (eventually) get there.
>>>>>>>>> 
>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>> 
>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>> don't
>>>>>>>>>> see it there yet.
>>>>>>>>>> 
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>>>> dlieu.7@gmail.com
>>>>>>>>>> wrote:
>>>>>>>>>> 
>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>>>>>> pat@occamsmachete.com
>>>>>>>>>>> wrote:
>>>>>>>>>>> 
>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to
>>>>>> the
>>>>>>>>>> 1.2.1
>>>>>>>>>>> As I just explained, such resets are not possible with ASF git.
>>>>>>>>>> Reverting
>>>>>>>>>>> is the only option.
>>>>>>>>>>> 
>>>>>>>>>>> -d
>>>>>>>>>>> 
> 




Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
Great, thanks. So I bet the only remaining problem is getting the right Guava into the Spark client and workers.

Spark shades Guava, so it is probably not the source of the problems since its copy is never supposed to be in scope. When I use the same version number I get errors anyway, so the conflict must be in the Mahout artifacts, probably the assembly jar created in mahout/spark together with its transitive dependencies.

I was planning to wait on Dmitriy’s refactoring before messing with that, but in a separate branch I did a drastically pared-down version that might be better to use.
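If it does come down to Guava, one quick way to see which Guava the mahout/spark module drags in transitively, just a diagnostic sketch that assumes the module directory is simply "spark", is Maven's dependency:tree with an includes filter:

# run from the mahout source root; prints every path through which Guava is resolved
$ mvn dependency:tree -pl spark -Dincludes=com.google.guava:guava

Whatever non-provided Guava shows up there is the most likely candidate for what ends up inside the mahout-spark_2.10 job jar.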

On Feb 26, 2015, at 8:58 AM, Andrew Palumbo <ap...@outlook.com> wrote:


Just tested spark-testnb with the -ma option set, with no problems. Spark naive Bayes is pretty bland and doesn't use any classes other than core Scala, Mahout, and Spark.

On 02/26/2015 11:36 AM, Pat Ferrel wrote:
> A difference in how classes are found comes about when running a standalone cluster. You start Spark with spark/sbin/start-all.sh, with only localhost in the slaves file, then run the driver, “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
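For reference, a minimal command sketch of that standalone setup; the host name and the elided driver options are copied from the line above, and <driver> stands in for whichever driver (nbtest, spark-testnb, ...) is being exercised:

# in the Spark install: conf/slaves contains just "localhost", then start master and worker
$ sbin/start-all.sh
# from the Mahout directory: point the driver at the standalone master instead of local mode
$ bin/mahout <driver> … -ma spark://Maclaurin.local:7077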
> 
> re: poms, of course—stupid question.
> 
> On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:
> 
> The NB drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since then, but I did test a script that runs NB from the spark-shell with no problems.
> 
> I put those in math-scala since all of our Scala modules depend on math-scala.
> 
> On 02/26/2015 11:01 AM, Pat Ferrel wrote:
>> Should these be in all the Scala poms, or just math-scala?
>> 
>> Is the NB stuff working with the 1.2 branch?
>> 
>> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>> 
>> Sorry, I left out the scala-compiler artifact (at the top); it should read:
>> 
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-compiler</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-reflect</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-library</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-actors</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scalap</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> 
>> 
>> 
>> Adding these has bin/mahout spark-shell working for me on Spark 1.2.1.
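Since all five artifacts resolve against the same ${scala.version} property, a sanity check worth running afterwards, only a sketch with the module name assumed, is to confirm that nothing still pulls in a second scala-lang version:

# the versions of every org.scala-lang artifact listed here should agree
$ mvn dependency:tree -pl math-scala -Dincludes=org.scala-lang

The spark module can be checked the same way, since it picks these up through its dependency on math-scala.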
>> 
>> 
>> 
>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>> 
>>> <dependency>
>>>  <groupId>org.scala-lang</groupId>
>>>  <artifactId>scala-reflect</artifactId>
>>>  <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>  <groupId>org.scala-lang</groupId>
>>>  <artifactId>scala-library</artifactId>
>>>  <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>  <groupId>org.scala-lang</groupId>
>>>  <artifactId>scala-actors</artifactId>
>>>  <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>  <groupId>org.scala-lang</groupId>
>>>  <artifactId>scalap</artifactId>
>>>  <version>${scala.version}</version>
>>> </dependency>
>>> 
>>> 
>>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
>>> 
>>> 
>>> 
>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>> Except after some time it blew up:
>>>> 
>>>> $ bin/mahout spark-shell
>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>> explanation.
>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>> 
>>>>                          _                 _
>>>>          _ __ ___   __ _| |__   ___  _   _| |_
>>>>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>         | | | | | | (_| | | | | (_) | |_| | |_
>>>>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>> 
>>>> 
>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>> 1.7.0_03)
>>>> Type in expressions to have them evaluated.
>>>> Type :help for more information.
>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>> another address
>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>> library for your platform... using builtin-java classes where applicable
>>>> Created spark context..
>>>> Mahout distributed context is available as "implicit val sdc".
>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>> serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>     at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>     at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>     at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>     at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>     at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>     at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>     at scala.util.Try$.apply(Try.scala:161)
>>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>     at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>     at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>     at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>     at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>     at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>     at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>     at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> [the identical InvalidClassException stack trace for BlockManagerId repeats at 12:24:50]
>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>> removed): remote Akka client disassociated
>>>> [The same pair of InvalidClassException stack traces for
>>>> org.apache.spark.storage.BlockManagerId, each pair followed by a
>>>> "TaskSchedulerImpl: Lost an executor N (already removed): remote Akka
>>>> client disassociated" line, repeats as executors 1 through 8 are lost
>>>> between 12:27:06 and 12:39:06, and once more before executor 9 is lost
>>>> below; the duplicated traces are elided here.]
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>> killed. Reason: Master removed our application: FAILED
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>> cluster scheduler: Master removed our application: FAILED
>>>> 
>>>> 
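The repeated InvalidClassException above is the classic symptom of the driver
and the cluster running different Spark builds. As a quick check, sketched here
with illustrative jar paths rather than ones taken from the thread, the JDK
serialver tool reports the serialVersionUID each build carries for
BlockManagerId; mismatched values produce exactly the error above.

    # Compare the serialVersionUID of BlockManagerId in the assembly the driver
    # was built against and the one deployed on the workers (paths illustrative).
    serialver -classpath /path/to/driver/spark-assembly-1.2.1.jar \
        org.apache.spark.storage.BlockManagerId
    serialver -classpath /path/to/worker/spark-assembly-1.1.0.jar \
        org.apache.spark.storage.BlockManagerId
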
>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>> 
>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>> 
>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>> 
>>>>>> ok, spark 1.2 is mirrored now,
>>>>>> and master should also be ok (back to 1.1)
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>> 
>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>> 
>>>>>>> I got email confirmation like so:
>>>>>>> Repository: mahout
>>>>>>> Updated Branches:
>>>>>>>   refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>> 
>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>>> wrote:
>>>>>>> 
>>>>>>>> yeah ok, so you pushed the 1.2 branch to ASF but it is not yet on GitHub;
>>>>>>>> it should be there eventually, give it a bit of time.
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>>> what exactly did you try to do?
>>>>>>>>> 
>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but ASF
>>>>>>>>> git does not allow that.
>>>>>>>>> 
>>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before, so if
>>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>> 
>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>> 
>>>>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and don't
>>>>>>>>>> see it there yet.
>>>>>>>>>> 
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>>>> dlieu.7@gmail.com
>>>>>>>>>> wrote:
>>>>>>>>>> 
>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>>>>>> pat@occamsmachete.com
>>>>>>>>>>> wrote:
>>>>>>>>>>> 
>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>>>> 1.2.1
>>>>>>>>>>> As I just explained, such resets are not possible with ASF git.
>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>> 
>>>>>>>>>>> -d
>>>>>>>>>>> 
> 



Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Just tested spark-testnb with the -ma option set; no problems. Spark
naive Bayes is pretty bland and doesn't use any classes other than core
Scala, Mahout, and Spark.
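
For a quick sanity check without the full script, something roughly like the
following from the mahout> prompt exercises the DSL end to end. This is just a
generic Samsara smoke test, not the nbtest code, and it assumes the standard
imports and the implicit distributed context the shell already sets up:

// small in-core matrix, distributed as a DRM over the shell's context
val inCoreA = dense((1, 2, 3), (3, 4, 5))
val drmA = drmParallelize(inCoreA, numPartitions = 2)

// force an actual distributed computation and pull the result back in-core
val inCoreAtA = (drmA.t %*% drmA).collect
println(inCoreAtA)

If the executors are healthy this comes back almost immediately; if you instead
see the BlockManagerId serialVersionUID errors further down in this thread, the
shell's Spark jars and the running cluster are most likely from different builds.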

On 02/26/2015 11:36 AM, Pat Ferrel wrote:
> A difference in how classes are found comes about when running a standalone cluster. You start Spark with spark/sbin/start-all.sh with only localhost in slaves. Then run the driver “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
>
> re: poms, of course—stupid question.
>
> On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:
>
> The NB drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since, but I did test a script that runs NB from the spark-shell with no problems.
>
> I put those in math-scala since all of our Scala modules depend on math-scala.
>
> On 02/26/2015 11:01 AM, Pat Ferrel wrote:
>> Should these be in all Scala poms or just math-scala?
>>
>> Is the NB stuff working with the 1.2 branch?
>>
>> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>>
>> Sorry, I left out the scala-compiler artifact (at the top); it should read:
>>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-compiler</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-reflect</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-library</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-actors</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scalap</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>>
>>
>>
>> Adding these gets $ mahout spark-shell working for me on Spark 1.2.1.
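>>
>> If you want to double-check which Scala jars the shell actually picked up (as
>> opposed to what the poms declare), plain JVM introspection from the mahout>
>> prompt is enough -- treat this as a rough sketch, nothing Mahout-specific:
>>
>> // version of the scala-library the REPL is running against
>> println(scala.util.Properties.versionString)
>>
>> // which jar scala-reflect was actually loaded from
>> println(classOf[scala.reflect.internal.TreeInfo]
>>   .getProtectionDomain.getCodeSource.getLocation)
>>
>> If those two disagree, the shell is mixing Scala versions again, which is
>> exactly what pinning these artifacts in the pom is meant to prevent.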
>>
>>
>>
>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-reflect</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-library</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-actors</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scalap</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>>
>>>
>>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
>>>
>>>
>>>
>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>> Except after some time it blew up:
>>>>
>>>> $ bin/mahout spark-shell
>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>> explanation.
>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>
>>>>                           _                 _
>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>
>>>>
>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>> 1.7.0_03)
>>>> Type in expressions to have them evaluated.
>>>> Type :help for more information.
>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>> another address
>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>> library for your platform... using builtin-java classes where applicable
>>>> Created spark context..
>>>> Mahout distributed context is available as "implicit val sdc".
>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>> serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 12:38:33 ERROR Remoting: [same BlockManagerId InvalidClassException
>>>> and stack trace as above]
>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:39:43 ERROR Remoting: [same exception and stack trace as above]
>>>> 15/02/24 12:40:16 ERROR Remoting: [same exception and stack trace as above]
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>> killed. Reason: Master removed our application: FAILED
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>> cluster scheduler: Master removed our application: FAILED
>>>>
>>>>
>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>>
>>>>>> ok spark 1.2 is mirrored now.
>>>>>> and master should be also ok (back to 1.1)
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>
>>>>>>> I got email confirmation like so:
>>>>>>> Repository: mahout
>>>>>>> Updated Branches:
>>>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>>> wrote:
>>>>>>>
>>>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
>>>>>>>> should be there eventually, give it a bit of time.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> what exactly did you try to do?
>>>>>>>>>
>>>>>>>>> just resetting HEAD will not work on remote branch -- you need
>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but asf
>>>>>>>>> git does not allow that.
>>>>>>>>>
>>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before. so if
>>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>
>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>>> don't see it there yet.
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>> wrote:
>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>>> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>>>> 1.2.1
>>>>>>>>>>> As i just explained, resets are not possible with ASF git. Reverting
>>>>>>>>>>> is the only option.
>>>>>>>>>>>
>>>>>>>>>>> -d
>>>>>>>>>>>
>


Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
A difference in how classes are found comes about when running against a standalone cluster. You start Spark with spark/sbin/start-all.sh, with only localhost in conf/slaves, then run the driver against the master, e.g. “mahout nbtest … -ma spark://Maclaurin.local:7077” or some such.
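
Roughly, and untested here, that setup looks something like the following; SPARK_HOME is assumed to point at the same Spark build Mahout was compiled against, and the driver line just mirrors the example above:

$ echo localhost > $SPARK_HOME/conf/slaves      # single local worker
$ $SPARK_HOME/sbin/start-all.sh                 # start the master and worker daemons
$ bin/mahout nbtest … -ma spark://Maclaurin.local:7077

In that mode the executors load Spark classes from the installation the workers were started from rather than from the driver's own build, which is presumably where the difference in class resolution shows up.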

re: poms, of course—stupid question.

On Feb 26, 2015, at 8:19 AM, Andrew Palumbo <ap...@outlook.com> wrote:

The NB drivers were working (locally) with the 1.2.1 patch before those deps were added. I haven't tested the drivers since, but I did test a script that runs NB from the spark-shell with no problems.

I put those in math-scala since all of our scala modules depend on math-scala.
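
One way to sanity-check that the pinned Scala artifacts really do propagate from math-scala into the other modules (untested here, just standard Maven) is to look at the dependency tree filtered to the Scala group:

$ mvn dependency:tree -Dincludes=org.scala-lang

Every org.scala-lang artifact reported for the scala modules should resolve to ${scala.version} rather than to whatever transitive version the Spark dependencies would otherwise drag in.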

On 02/26/2015 11:01 AM, Pat Ferrel wrote:
> should these be in all scala poms or just math-scala?
> 
> Is the NB stuff working with the 1.2 branch?
> 
> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
> 
> Sorry, I left out the scala-compiler artifact (at the top); it should read:
> 
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-compiler</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-reflect</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-library</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-actors</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scalap</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> 
> 
> 
> Adding these gets $ mahout spark-shell working for me on Spark 1.2.1.
> 
> 
> 
> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>> Adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>> 
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-reflect</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-library</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scala-actors</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>  <groupId>org.scala-lang</groupId>
>>  <artifactId>scalap</artifactId>
>>  <version>${scala.version}</version>
>> </dependency>
>> 
>> 
>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
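>>
>> (Side note, untested: since the InvalidClassException in the log below just
>> compares two computed serialVersionUIDs for
>> org.apache.spark.storage.BlockManagerId, the JDK's serialver tool is a quick
>> way to see which Spark build each side is running, e.g.
>>
>> $ serialver -classpath spark-assembly-1.1.1-hadoop1.1.1.jar \
>>     org.apache.spark.storage.BlockManagerId
>>
>> run once against the assembly on the driver's classpath and once against the
>> assembly the standalone workers were started from; if the two numbers differ,
>> the shell and the cluster are running different Spark builds.)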
>> 
>> 
>> 
>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>> Except after some time it blew up:
>>> 
>>> $ bin/mahout spark-shell
>>> SLF4J: Class path contains multiple SLF4J bindings.
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>> explanation.
>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>> 
>>>                          _                 _
>>>          _ __ ___   __ _| |__   ___  _   _| |_
>>>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>         | | | | | | (_| | | | | (_) | |_| | |_
>>>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>> 
>>> 
>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>> 1.7.0_03)
>>> Type in expressions to have them evaluated.
>>> Type :help for more information.
>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>> another address
>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>> library for your platform... using builtin-java classes where applicable
>>> Created spark context..
>>> Mahout distributed context is available as "implicit val sdc".
>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>> classdesc serialVersionUID = 1677335532749418220, local class
>>> serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:24:50 ERROR Remoting: [same BlockManagerId InvalidClassException
>>> and stack trace as above]
>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:26:00 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:26:33 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:27:43 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:28:16 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:29:26 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:29:59 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:31:09 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:31:42 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:32:51 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:33:24 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:34:34 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:35:07 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:36:17 ERROR Remoting: [same exception and stack trace as above]
>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>     at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>     at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>     at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>     at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>     at scala.util.Try$.apply(Try.scala:161)
>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>     at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>     at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>     at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>     at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>> killed. Reason: Master removed our application: FAILED
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>> cluster scheduler: Master removed our application: FAILED
>>> 
>>> 
>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>> 
>>>> Bingo, works off master now; thanks Dmitriy.
>>>> 
>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>> 
>>>>> ok spark 1.2 is mirrored now.
>>>>> and master should also be ok (back to 1.1)
>>>>> 
>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>> 
>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>> 
>>>>>> I got email confirmation like so:
>>>>>> Repository: mahout
>>>>>> Updated Branches:
>>>>>>   refs/heads/spark-1.2 [created] 901ef03b4
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>> 
>>>>>>> yeah ok so you pushed the 1.2 branch to asf but it is not yet in github. it
>>>>>>> should be there eventually, give it a bit of time.
>>>>>>> 
>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>> wrote:
>>>>>>> 
>>>>>>>> what exactly did you try to do?
>>>>>>>> 
>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>> force-push that (with +) since it is a history-rewriting push, but asf git
>>>>>>>> does not allow that.
>>>>>>>> 
>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before, so if you
>>>>>>>> create a new one it should (eventually) get there.
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>> 
>>>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and don't
>>>>>>>>> see it there yet.
>>>>>>>>> 
>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>>> dlieu.7@gmail.com
>>>>>>>>> wrote:
>>>>>>>>> 
>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>>>>> pat@occamsmachete.com
>>>>>>>>>> wrote:
>>>>>>>>>> 
>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the 1.2.1
>>>>>>>>>> As I just explained, such resets are not possible with ASF git.
>>>>>>>>>> Reverting is the only option.
>>>>>>>>>> 
>>>>>>>>>> -d
>>>>>>>>>> 
>> 
> 
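For reference, a minimal sketch of the branch workflow described above, reusing the
commit hash from the confirmation mail; the remote name "origin" is an assumption:

$ git checkout -b spark-1.2 901ef03b4   # new branch at the desired commit
$ git push origin spark-1.2             # a plain push of a new branch is accepted
# resetting an existing branch (e.g. master) would need a history-rewriting force
# push such as "git push origin +master", which the ASF remote rejects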



Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
The NB drivers were working (locally) with the 1.2.1 patch before those 
deps were added. I haven't tested the drivers since, but I did test a 
script that runs NB from the spark-shell with no problems.

I put those in math-scala since all of our scala modules depend on 
math-scala.
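
For anyone reproducing this, a minimal sketch of the rebuild/re-test cycle after
adding those dependencies (run from the top of the Mahout checkout; -DskipTests is
only there to save time):

$ mvn clean install -DskipTests   # rebuild so modules that depend on math-scala pick up the change
$ bin/mahout spark-shell          # then relaunch the shell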

On 02/26/2015 11:01 AM, Pat Ferrel wrote:
> should these be in all scala poms or just math-scala?
>
> Is the NB stuff working with the 1.2 branch?
>
> On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>
> sorry, I left out the scala-compiler artifact (at the top); it should read:
>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-compiler</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-reflect</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-library</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-actors</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scalap</artifactId>
>   <version>${scala.version}</version>
> </dependency>
>
>
>
> adding these gets $ mahout spark-shell working for me on Spark 1.2.1.
>
>
>
> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>> adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master, before the revert/1.2 branch:
>>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-reflect</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-library</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-actors</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scalap</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>>
>>
>> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
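
For what it's worth, the repeated InvalidClassException on BlockManagerId in the log
below usually means the Spark assembly the shell puts on its classpath and the build
the running cluster was started from are not the same; a quick sanity check (the path
assumes a source build of Spark, as in the classpath above; a binary distribution
keeps the assembly under lib/ instead):

$ ls $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-*.jar
# repeat on each master/worker host; if the versions in the jar names differ
# (e.g. 1.1.1 vs 1.2.1), restart the cluster from the same build the shell was
# compiled against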
>>
>>
>>
>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>> Except after some time it blew up:
>>>
>>> $ bin/mahout spark-shell
>>> SLF4J: Class path contains multiple SLF4J bindings.
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>> explanation.
>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>
>>>                           _                 _
>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>
>>>
>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>> 1.7.0_03)
>>> Type in expressions to have them evaluated.
>>> Type :help for more information.
>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>> another address
>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>> library for your platform... using builtin-java classes where applicable
>>> Created spark context..
>>> Mahout distributed context is available as "implicit val sdc".
>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>> classdesc serialVersionUID = 1677335532749418220, local class
>>> serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>> removed): remote Akka client disassociated
>>> [the same java.io.InvalidClassException / BlockManagerId stack trace as
>>> above, repeated at 12:32:51, 12:33:24, 12:34:34, 12:35:07, 12:36:17,
>>> 12:36:50, 12:38:00, 12:38:33, 12:39:43 and 12:40:16; duplicates elided]
>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>> killed. Reason: Master removed our application: FAILED
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>> cluster scheduler: Master removed our application: FAILED
>>>
>>>
>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>> Bingo, works off master now; thanks Dmitriy.
>>>>
>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>>
>>>>> ok spark 1.2 is mirrored now.
>>>>> and master should be also ok (back to 1.1)
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>
>>>>>> I got email confirmation like so:
>>>>>> Repository: mahout
>>>>>> Updated Branches:
>>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> yeah ok so you pushed the 1.2 branch to asf but it is not yet in github.
>>>>>>> it should be there eventually, give it a bit of time.
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>>> wrote:
>>>>>>>
>>>>>>>> what exactly did you try to do?
>>>>>>>>
>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but asf
>>>>>>>> git does not allow that.
>>>>>>>>
>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before, so if
>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>
>>>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and
>>>>>>>>> don't see it there yet.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>> wrote:
>>>>>>>>>>
>>>>>>>>>>> to be safe I'd "git reset --hard xyz" to the commit previous to the
>>>>>>>>>>> 1.2.1
>>>>>>>>>> As I just explained, resets like that are not possible with ASF git.
>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>
>>>>>>>>>> -d
>>>>>>>>>>
>>
>
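
To spell out the two git workflows discussed above, here is a rough sketch
(the "apache" remote name and the commit SHAs are placeholders):

  # cut a branch from the pre-upgrade commit and publish it; ASF mirrors new
  # branches to GitHub with some lag
  git checkout -b spark-1.2 <pre-upgrade-sha>
  git push apache spark-1.2

  # a history-rewriting (forced) push, which the ASF remote rejects
  git push apache +master

  # the non-rewriting alternative: revert the upgrade commit and push normally
  git revert <upgrade-commit-sha>
  git push apache master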


Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
should these be in all scala poms or just math-scala?

Is the NB stuff working with the 1.2 branch?

On Feb 24, 2015, at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:

sorry - I left out the scala-compiler artifact (at the top); it should read:

<dependency>
 <groupId>org.scala-lang</groupId>
 <artifactId>scala-compiler</artifactId>
 <version>${scala.version}</version>
</dependency>
<dependency>
 <groupId>org.scala-lang</groupId>
 <artifactId>scala-reflect</artifactId>
 <version>${scala.version}</version>
</dependency>
<dependency>
 <groupId>org.scala-lang</groupId>
 <artifactId>scala-library</artifactId>
 <version>${scala.version}</version>
</dependency>
<dependency>
 <groupId>org.scala-lang</groupId>
 <artifactId>scala-actors</artifactId>
 <version>${scala.version}</version>
</dependency>
<dependency>
 <groupId>org.scala-lang</groupId>
 <artifactId>scalap</artifactId>
 <version>${scala.version}</version>
</dependency>



adding these has $ mahout spark-shell working for me on spark 1.2.1.
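
Note that the ${scala.version} property above is assumed to be inherited from
the parent pom rather than redefined per module. To sanity-check which Scala
artifacts the module actually resolves after the change, something like this
should do it (run from the top-level mahout directory):

  mvn -pl math-scala dependency:tree -Dincludes=org.scala-lang

and scala-compiler, scala-reflect, scala-library, scala-actors and scalap
should all show up at the same 2.10.x version.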



On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
> adding the following dependencies to the math-scala pom.xml seems to fix the problem for me on the old master (before the revert/1.2 branch):
> 
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-reflect</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-library</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scala-actors</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> <dependency>
>  <groupId>org.scala-lang</groupId>
>  <artifactId>scalap</artifactId>
>  <version>${scala.version}</version>
> </dependency>
> 
> 
> I don't know if this will help with item-similarity, and it doesn't seem to have anything to do with the crash that Andrew M. is experiencing on the reverted spark-1.1.0 master.
> 
> 
> 
> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>> Except after some time it blew up:
>> 
>> $ bin/mahout spark-shell
>> SLF4J: Class path contains multiple SLF4J bindings.
>> SLF4J: Found binding in
>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>> SLF4J: Found binding in
>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>> SLF4J: Found binding in
>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>> explanation.
>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>> 
>>                          _                 _
>>          _ __ ___   __ _| |__   ___  _   _| |_
>>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>         | | | | | | (_| | | | | (_) | |_| | |_
>>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>> 
>> 
>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>> 1.7.0_03)
>> Type in expressions to have them evaluated.
>> Type :help for more information.
>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>> another address
>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>> library for your platform... using builtin-java classes where applicable
>> Created spark context..
>> Mahout distributed context is available as "implicit val sdc".
>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>> classdesc serialVersionUID = 1677335532749418220, local class
>> serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> [the same java.io.InvalidClassException / BlockManagerId stack trace as
>> above, repeated at 12:24:50, 12:26:00, 12:26:33, 12:27:43, 12:28:16,
>> 12:29:26, 12:29:59 and 12:31:09; duplicates elided]
>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>> killed. Reason: Master removed our application: FAILED
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>> cluster scheduler: Master removed our application: FAILED
>> 
>> 
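The repeated java.io.InvalidClassException above means the driver and the executors
are deserializing org.apache.spark.storage.BlockManagerId from two different Spark
builds, so the serialVersionUID in the stream does not match the local one. A minimal
diagnostic sketch (not something run in this thread) that can be pasted into a Scala
shell whose classpath carries the Spark jars you built against, to see which UID your
side has:

  // Assumes the locally built Spark assembly is on the classpath.
  import java.io.ObjectStreamClass
  import org.apache.spark.storage.BlockManagerId

  // Compare this with the "stream classdesc serialVersionUID" in the log;
  // a mismatch means the other side is running a different Spark build.
  val localUid = ObjectStreamClass.lookup(classOf[BlockManagerId]).getSerialVersionUID
  println("local BlockManagerId serialVersionUID = " + localUid)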
>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>> 
>>> Bingo, works off master now; thanks Dmitriy.
>>> 
>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>> wrote:
>>> 
>>>> OK, spark 1.2 is mirrored now,
>>>> and master should also be ok (back to 1.1).
>>>> 
>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>> 
>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>> 
>>>>> I got email confirmation like so:
>>>>> Repository: mahout
>>>>> Updated Branches:
>>>>>   refs/heads/spark-1.2 [created] 901ef03b4
>>>>> 
>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>> 
>>>>>> Yeah, OK, so you pushed the 1.2 branch to ASF but it is not yet on GitHub. It
>>>>>> should be there eventually; give it a bit of time.
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>>> wrote:
>>>>>> 
>>>>>>> what exactly did you try to do?
>>>>>>> 
>>>>>>> Just resetting HEAD will not work on a remote branch -- you need to
>>>>>>> force-push that (with +) since it is a history-rewriting push, but ASF git
>>>>>>> does not allow that.
>>>>>>> 
>>>>>>> ASF will mirror ALL branches AFAIK. I think I've done it before, so if you
>>>>>>> create a new one it should (eventually) get there.
>>>>>>> 
>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>> 
>>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and don't
>>>>>>>> see it there yet.
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>> dlieu.7@gmail.com
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>>>> pat@occamsmachete.com
>>>>>>>>> wrote:
>>>>>>>>> 
>>>>>>>>>> to be safe I'd "git reset --hard xyz" to the commit previous to the 1.2.1
>>>>>>>>> As I just explained, such resets are not possible with ASF git. Reverting
>>>>>>>>> is the only option.
>>>>>>>>> 
>>>>>>>>> -d
>>>>>>>>> 
>>>>>>> 
>>> 
> 
> 
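The upshot of the exchange above: ASF git rejects history-rewriting (forced) pushes,
so backing out of a bad state on a shared branch comes down to either publishing a new
branch cut at an older commit or reverting on top of the existing history. A rough
sketch of both, with hypothetical commit placeholders (only the spark-1.2 branch name
and 901ef03b4 actually appear in the thread):

  # publish a branch at an earlier, known-good commit -- no history rewrite involved
  git checkout -b spark-1.2 <known-good-sha>
  git push origin spark-1.2

  # or back out a bad commit on master without rewriting history
  git revert <bad-commit-sha>
  git push origin master

  # what ASF git refuses: a forced, history-rewriting push such as
  #   git push origin +master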



Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
It looks like a small thing.
Please just run the tests in the spark and math-scala modules only (and in
spark-shell, but I don't think it has any tests there). That should be bearable,
as opposed to including the math and legacy tests as well; that's what I do.
If there are any further unexpected troubles, the CI will flush them out.
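For reference, a module-scoped run of that kind would look roughly like this from the
top of the Mahout source tree (a sketch; the module directory names are assumed from
the artifact names in this thread and may need adjusting):

  # run only the Scala DSL, Spark binding, and shell module tests,
  # skipping the math and legacy (mrlegacy) test suites
  mvn test -pl math-scala,spark,spark-shell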

On Tue, Feb 24, 2015 at 1:40 PM, Andrew Palumbo <ap...@outlook.com> wrote:

> Sure - just looking at it now. I haven't run any tests on it, just a script
> locally to make sure it doesn't crash. Should I do a PR against the 1.2
> branch or just push it?
>
>
>
> On 02/24/2015 04:30 PM, Dmitriy Lyubimov wrote:
>
>> Andrew, perhaps you could commit a patch on top of the 1.2 branch? Much
>> appreciated.
>>
>> On Tue, Feb 24, 2015 at 1:25 PM, Andrew Palumbo <ap...@outlook.com>
>> wrote:
>>
>>> Sorry - I left out the scala-compiler artifact (at the top); it should
>>> read:
>>>
>>> <dependency>
>>>    <groupId>org.scala-lang</groupId>
>>>    <artifactId>scala-compiler</artifactId>
>>>    <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>    <groupId>org.scala-lang</groupId>
>>>    <artifactId>scala-reflect</artifactId>
>>>    <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>    <groupId>org.scala-lang</groupId>
>>>    <artifactId>scala-library</artifactId>
>>>    <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>    <groupId>org.scala-lang</groupId>
>>>    <artifactId>scala-actors</artifactId>
>>>    <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>    <groupId>org.scala-lang</groupId>
>>>    <artifactId>scalap</artifactId>
>>>    <version>${scala.version}</version>
>>> </dependency>
>>>
>>>
>>>
>>> Adding these gets bin/mahout spark-shell working for me on Spark 1.2.1.
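One quick way to sanity-check the effect of those additions (a sketch, not something
run in this thread) is to list which org.scala-lang artifacts the affected modules
actually resolve and confirm they all come out at one and the same Scala version:

  # module names are assumptions; adjust to the actual reactor layout
  mvn dependency:tree -pl math-scala,spark -Dincludes=org.scala-lang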
>>>
>>>
>>>
>>>
>>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>>
>>>> Adding the following dependencies to the math-scala pom.xml seems to
>>>> fix the problem for me on the old master, before the revert/1.2 branch:
>>>>
>>>> <dependency>
>>>>    <groupId>org.scala-lang</groupId>
>>>>    <artifactId>scala-reflect</artifactId>
>>>>    <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>    <groupId>org.scala-lang</groupId>
>>>>    <artifactId>scala-library</artifactId>
>>>>    <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>    <groupId>org.scala-lang</groupId>
>>>>    <artifactId>scala-actors</artifactId>
>>>>    <version>${scala.version}</version>
>>>> </dependency>
>>>> <dependency>
>>>>    <groupId>org.scala-lang</groupId>
>>>>    <artifactId>scalap</artifactId>
>>>>    <version>${scala.version}</version>
>>>> </dependency>
>>>>
>>>>
>>>> I don't know if this will help with item-similarity, and it doesn't seem to
>>>> have anything to do with the crash that Andrew M. is experiencing on the
>>>> reverted spark-1.1.0 master.
>>>>
>>>>
>>>>
>>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>>
>>>>  Except after some time it blew up:
>>>>>
>>>>> $ bin/mahout spark-shell
>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>
>>>>>
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>> explanation.
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>
>>>>>                            _                 _
>>>>>            _ __ ___   __ _| |__   ___  _   _| |_
>>>>>           | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>           | | | | | | (_| | | | | (_) | |_| | |_
>>>>>           |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>
>>>>>
>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>> 1.7.0_03)
>>>>> Type in expressions to have them evaluated.
>>>>> Type :help for more information.
>>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a
>>>>> loopback
>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>> another address
>>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>> library for your platform... using builtin-java classes where
>>>>> applicable
>>>>> Created spark context..
>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>> mahout> 15/02/24 12:24:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>     at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>     at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>     at scala.util.Try$.apply(Try.scala:161)
>>>>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>     at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>     at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>     at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>     at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>     at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>     at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>
>>>>> [...the same InvalidClassException stack trace for BlockManagerId
>>>>> repeated at 12:24:50...]
>>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>> removed): remote Akka client disassociated
>>>>> [...the same stack trace repeated at 12:26:00 and 12:26:33...]
>>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>>> removed): remote Akka client disassociated
>>>>> [...the same stack trace repeated at 12:27:43...]
>>>>>
>>>>> [... the same java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId stack trace repeats roughly every 30 seconds as the remaining executors are lost ...]
>>>>>
>>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already removed): remote Akka client disassociated
>>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been killed. Reason: Master removed our application: FAILED
>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from cluster scheduler: Master removed our application: FAILED
>>>>>
>>>>>
>>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> ok spark 1.2 is mirrored now.
>>>>>>> and master should be also ok (back to 1.1)
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>> I got email confirmation like so:
>>>>>>>> Repository: mahout
>>>>>>>> Updated Branches:
>>>>>>>>     refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>>>>>>>>> it should be there eventually, give it a bit of time.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>>> what exactly did you try to do?
>>>>>>>>>> just resetting HEAD will not work on remote branch -- you need
>>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but
>>>>>>>>>> asf git does not allow that.
>>>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before. so
>>>>>>>>>> if you create a new one it should (eventually) get there.
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>>
>>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>>>> don't see it there yet.
>>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>>> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>>>> wrote:
>>>>>>>>>>>>
>>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to
>>>>>>>>>>>>> the 1.2.1
>>>>>>>>>>>>
>>>>>>>>>>>> As i just explained, that resets are not possible with ASF git.
>>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>>>
>>>>>>>>>>>> -d
>>>>>>>>>>>>
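A command-level sketch of the workflow discussed in the quotes above, using the branch name and commit hash mentioned in this thread; the revert target is a hypothetical placeholder. Since ASF git refuses forced, history-rewriting pushes, the 1.2 work is kept on its own branch and master is cleaned up with a revert rather than a reset:

    # Keep the Spark 1.2 work on its own branch; 901ef03b4 is the commit the
    # spark-1.2 branch was created at in this thread.
    git checkout -b spark-1.2 901ef03b4
    # A plain (non-forced) push is accepted, and the GitHub mirror picks the
    # new branch up after a short delay.
    git push origin spark-1.2
    # On master, undo the unwanted commit with a revert instead of a reset,
    # since the remote will not accept a rewritten history.
    git checkout master
    git revert <sha-of-the-commit-to-undo>   # placeholder sha
    git push origin master
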

Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Sure - just looking at it now. I haven't run any tests on it, just 
running a script locally to make sure it doesn't crash. Should I do a PR 
on the 1.2 branch or just push it?


On 02/24/2015 04:30 PM, Dmitriy Lyubimov wrote:
> Andrew, perhaps you could commit a patch on top of 1.2 branch? much
> appreciated.
>
> On Tue, Feb 24, 2015 at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>
>> Sorry - I left out the scala-compiler artifact (at the top); it should read:
>>
>>   <dependency>
>>     <groupId>org.scala-lang</groupId>
>>     <artifactId>scala-compiler</artifactId>
>>     <version>${scala.version}</version>
>>   </dependency>
>>   <dependency>
>>     <groupId>org.scala-lang</groupId>
>>     <artifactId>scala-reflect</artifactId>
>>     <version>${scala.version}</version>
>>   </dependency>
>>   <dependency>
>>     <groupId>org.scala-lang</groupId>
>>     <artifactId>scala-library</artifactId>
>>     <version>${scala.version}</version>
>>   </dependency>
>>   <dependency>
>>     <groupId>org.scala-lang</groupId>
>>     <artifactId>scala-actors</artifactId>
>>     <version>${scala.version}</version>
>>   </dependency>
>>   <dependency>
>>     <groupId>org.scala-lang</groupId>
>>     <artifactId>scalap</artifactId>
>>     <version>${scala.version}</version>
>>   </dependency>
>>
>>
>>
>> Adding these has $ mahout spark-shell working for me on Spark 1.2.1.
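A quick way to sanity-check a dependency fix like this one (a sketch, assuming it is run from the Mahout source root and that the module directory is named math-scala) is to have Maven print which org.scala-lang artifacts the module actually resolves, and at which versions:

    # List the org.scala-lang artifacts math-scala pulls in; scala-compiler,
    # scala-reflect, scala-library, etc. should all resolve to the same version.
    mvn -pl math-scala dependency:tree -Dincludes=org.scala-lang

Mixed Scala versions in that output would point to exactly the kind of binary incompatibility that breaks the shell's embedded compiler.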
>>
>>
>>
>>
>> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>>
>>> adding in the following dependencies to the math-scala pom.xml seems to
>>> fix the problem for me on the old master before the revert/1.2 branch:
>>>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-reflect</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-library</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scala-actors</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>> <dependency>
>>>   <groupId>org.scala-lang</groupId>
>>>   <artifactId>scalap</artifactId>
>>>   <version>${scala.version}</version>
>>> </dependency>
>>>
>>>
>>> I don't know if this will help with item-similarity, and it doesn't seem to
>>> have anything to do with the crash that Andrew M. is experiencing on the
>>> reverted spark-1.1.0 master.
>>>
>>>
>>>
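
The InvalidClassException in the log below is the usual symptom of the driver
and the workers running two different Spark builds (for instance, the 1.1.1
assembly on the Mahout classpath talking to a cluster launched from another
build), since both sides must agree on BlockManagerId's serialVersionUID. A
quick driver-side check, sketched for the mahout> prompt
(org.apache.spark.SPARK_VERSION is assumed to be available in these 1.x builds):

// Sketch: compare the Spark build on the local classpath with what the remote
// side reports; the computed UID should equal the "local class
// serialVersionUID" printed in the log.
println(org.apache.spark.SPARK_VERSION)   // assumed available in Spark 1.1/1.2
val desc = java.io.ObjectStreamClass.lookup(classOf[org.apache.spark.storage.BlockManagerId])
println(desc.getSerialVersionUID)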
>>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>>
>>>> Except after some time it blew up:
>>>>
>>>> $ bin/mahout spark-shell
>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>
>>>>                            _                 _
>>>>            _ __ ___   __ _| |__   ___  _   _| |_
>>>>           | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>           | | | | | | (_| | | | | (_) | |_| | |_
>>>>           |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>
>>>>
>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_03)
>>>> Type in expressions to have them evaluated.
>>>> Type :help for more information.
>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
>>>> Created spark context..
>>>> Mahout distributed context is available as "implicit val sdc".
>>>> mahout> 15/02/24 12:24:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>       at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>       at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>       at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>       at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>       at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>       at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>       at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>       at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>       at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>       at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>       at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>       at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>
>>>> [The identical ERROR Remoting / java.io.InvalidClassException stack trace for
>>>> org.apache.spark.storage.BlockManagerId (stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729)
>>>> repeats roughly every 30-40 seconds from 12:24:50 through 12:38:33, and in
>>>> between TaskSchedulerImpl reports "Lost an executor N (already removed):
>>>> remote Akka client disassociated" for executors 0 through 8. One final
>>>> repetition follows.]
>>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.
>>>> BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>       at java.io.ObjectStreamClass.initNonProxy(
>>>> ObjectStreamClass.java:604)
>>>>       at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>       at java.io.ObjectInputStream.readClassDesc(
>>>> ObjectInputStream.java:1514)
>>>>       at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>>> java:1347)
>>>>       at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>       at java.io.ObjectInputStream.readSerialData(
>>>> ObjectInputStream.java:1888)
>>>>       at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>>> java:1347)
>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>       at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>       at akka.serialization.JavaSerializer.fromBinary(
>>>> Serializer.scala:136)
>>>>       at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>
>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>       at akka.serialization.Serialization.deserialize(
>>>> Serialization.scala:98)
>>>>       at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>       at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>
>>>>       at akka.remote.DefaultMessageDispatcher.
>>>> payload$1(Endpoint.scala:55)
>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>       at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>
>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>       at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>
>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>>> ForkJoinTask.java:260)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>> runTask(ForkJoinPool.java:1339)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>
>>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.
>>>> BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>       at java.io.ObjectStreamClass.initNonProxy(
>>>> ObjectStreamClass.java:604)
>>>>       at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>       at java.io.ObjectInputStream.readClassDesc(
>>>> ObjectInputStream.java:1514)
>>>>       at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>>> java:1347)
>>>>       at
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>       at java.io.ObjectInputStream.readSerialData(
>>>> ObjectInputStream.java:1888)
>>>>       at
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>
>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>>> java:1347)
>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>       at
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>       at akka.serialization.JavaSerializer.fromBinary(
>>>> Serializer.scala:136)
>>>>       at
>>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>
>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>       at akka.serialization.Serialization.deserialize(
>>>> Serialization.scala:98)
>>>>       at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>       at
>>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>
>>>>       at akka.remote.DefaultMessageDispatcher.
>>>> payload$1(Endpoint.scala:55)
>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>       at
>>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>
>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>       at
>>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>
>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>>> ForkJoinTask.java:260)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>> runTask(ForkJoinPool.java:1339)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>       at
>>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>>>> killed. Reason: Master removed our application: FAILED
>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>> cluster scheduler: Master removed our application: FAILED
>>>>
>>>>
>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>>
>>>>>> ok spark 1.2 is mirrored now.
>>>>>> and master should be also ok (back to 1.1)
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>> I got email confirmation like so:
>>>>>>> Repository: mahout
>>>>>>> Updated Branches:
>>>>>>>   refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>> wrote:
>>>>>>>
>>>>>>>> yeah ok so you pushed the 1.2 branch to asf but it is not yet in
>>>>>>>> github; it should be there eventually, give it a bit of time.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> what exactly did you try to do?
>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but
>>>>>>>>> asf git does not allow that.
>>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before, so
>>>>>>>>> if you create a new one it should (eventually) get there.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>
>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>>> don't see it there yet.
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>>>>>>>>>> dlieu.7@gmail.com> wrote:
>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>>> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to
>>>>>>>>>>>> the 1.2.1
>>>>>>>>>>>
>>>>>>>>>>> As i just explained, resets are not possible with ASF git.
>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>>
>>>>>>>>>>> -d
>>>>>>>>>>>
>>>


Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
Andrew, perhaps you could commit a patch on top of the 1.2 branch? Much
appreciated.

On Tue, Feb 24, 2015 at 1:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:

> Sorry - I left out the scala-compiler artifact (at the top); it should read:
>
>  <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-compiler</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-reflect</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-library</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-actors</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scalap</artifactId>
>   <version>${scala.version}</version>
> </dependency>
>
>
>
> Adding these gets $ mahout spark-shell working for me on Spark 1.2.1.
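>
> For what it's worth, a quick way to confirm that the compiler and library
> line up after the pom change is to print both versions from inside the
> shell. A minimal sketch (assumes you are sitting in the mahout spark-shell
> REPL, so scala-compiler is already on the classpath):
>
>   // both lines should report the same Scala version once scala-compiler,
>   // scala-reflect and scala-library are all pinned to ${scala.version}
>   println("library:  " + scala.util.Properties.versionString)
>   println("compiler: " + scala.tools.nsc.Properties.versionString)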
>
>
>
>
> On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
>
>> Adding the following dependencies to the math-scala pom.xml seems to
>> fix the problem for me on the old master (before the revert / 1.2 branch):
>>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-reflect</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-library</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scala-actors</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>> <dependency>
>>   <groupId>org.scala-lang</groupId>
>>   <artifactId>scalap</artifactId>
>>   <version>${scala.version}</version>
>> </dependency>
>>
>>
>> I don't know if this will help with item-similarity, and it doesn't seem
>> to have anything to do with the crash that Andrew M. is experiencing on
>> the reverted spark-1.1.0 master.
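>>
>> For reference, the InvalidClassException in the log below is plain Java
>> serialization version checking: the two ends of the connection carry
>> BlockManagerId classes with different serialVersionUIDs, i.e. they were
>> built against different Spark versions. A minimal sketch for checking
>> which UID a given classpath carries (assumes a REPL or JVM with the Spark
>> jars on the classpath):
>>
>>   import java.io.ObjectStreamClass
>>   // prints the serialVersionUID of BlockManagerId as seen by *this* JVM;
>>   // compare it with the "stream classdesc serialVersionUID" in the error
>>   // to see which side is running the mismatched Spark build
>>   val desc = ObjectStreamClass.lookup(
>>     Class.forName("org.apache.spark.storage.BlockManagerId"))
>>   println(desc.getSerialVersionUID)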
>>
>>
>>
>> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>>
>>> Except after some time it blew up:
>>>
>>> $ bin/mahout spark-shell
>>> SLF4J: Class path contains multiple SLF4J bindings.
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-
>>> mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.
>>> 10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/
>>> spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>
>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>> explanation.
>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>
>>>                           _                 _
>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>
>>>
>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>> 1.7.0_03)
>>> Type in expressions to have them evaluated.
>>> Type :help for more information.
>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a
>>> loopback
>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>> another address
>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>>> library for your platform... using builtin-java classes where applicable
>>> Created spark context..
>>> Mahout distributed context is available as "implicit val sdc".
>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId; local class incompatible:
>>> stream
>>> classdesc serialVersionUID = 1677335532749418220, local class
>>> serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(
>>> ForkJoinTask.java:260)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>>> removed): remote Akka client disassociated
>>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.
>>> BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(
>>> ObjectStreamClass.java:604)
>>>      at
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(
>>> ObjectInputStream.java:1514)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(
>>> ObjectInputStream.java:1888)
>>>      at
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.
>>> java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(
>>> Serializer.scala:136)
>>>      at
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(
>>> Serialization.scala:98)
>>>      at
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>
>>>      at akka.remote.DefaultMessageDispatcher.
>>> payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already removed): remote Akka client disassociated
>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already removed): remote Akka client disassociated
>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already removed): remote Akka client disassociated
>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>> local class incompatible: stream classdesc serialVersionUID =
>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>      at scala.util.Try$.apply(Try.scala:161)
>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already removed): remote Akka client disassociated
>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been killed. Reason: Master removed our application: FAILED
>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from cluster scheduler: Master removed our application: FAILED
>>>
>>>
>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>  Bingo, works off master now; thanks Dmitriy.
>>>>
>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>>
>>>>  ok spark 1.2 is mirrored now.
>>>>> and master should be also ok (back to 1.1)
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>  I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>
>>>>>> I got email confirmation like so:
>>>>>> Repository: mahout
>>>>>> Updated Branches:
>>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>  yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>>>>>>>
>>>>>> iti
>>>>>
>>>>>> should be there eventually, give it a bit of time.
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <
>>>>>>> dlieu.7@gmail.com
>>>>>>> wrote:
>>>>>>>
>>>>>>>  what exactly did you try to do?
>>>>>>>>
>>>>>>>> just resetting HEAD will not work on remote branch -- you need
>>>>>>>>
>>>>>>> force-sync
>>>>>>
>>>>>>> that (with +) since it is a history-rewriting push, but asf git does
>>>>>>>>
>>>>>>> not
>>>>>>
>>>>>>> allow that.
>>>>>>>>
>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before. so
>>>>>>>>
>>>>>>> if
>>>>>
>>>>>> you
>>>>>>>
>>>>>>>> create a new one it should (eventually) get there.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>
>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>> don't see it there yet.
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>> wrote:
>>>>>>>>>>
>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>>> 1.2.1
>>>>>>>>>>
>>>>>>>>>> As i just explained, resets are not possible with ASF git. Reverting
>>>>>>>>>> is the only option.
>>>>>>>>>>
>>>>>>>>>> -d
>>>>>>>>>>
>>>>>>>>>>
>>>>>>>>
>>>>
>>
>>
>
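For anyone retracing the branch shuffle above, here is a rough sketch of the git commands involved. The spark-1.2 branch name and the 901ef03b4 commit id are the ones quoted earlier in this thread; the upgrade-commit sha is only a placeholder, and the exact invocations are assumptions rather than commands copied from anyone's shell:

    # Publish a branch pinned at the pre-upgrade commit; this only adds new
    # history, so an ordinary push is accepted.
    git checkout -b spark-1.2 901ef03b4
    git push origin spark-1.2

    # Force-updating master after a hard reset would rewrite history; the ASF
    # remote rejects forced ref updates like this one.
    git push origin +master

    # Undoing the upgrade on master therefore has to be a revert, which records
    # a new commit on top of the existing history.
    git revert <sha-of-the-spark-1.2-upgrade-commit>
    git push origin master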

Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Sorry, I left out the scala-compiler artifact (at the top); it should read:

<dependency>
  <groupId>org.scala-lang</groupId>
  <artifactId>scala-compiler</artifactId>
  <version>${scala.version}</version>
</dependency>
<dependency>
  <groupId>org.scala-lang</groupId>
  <artifactId>scala-reflect</artifactId>
  <version>${scala.version}</version>
</dependency>
<dependency>
  <groupId>org.scala-lang</groupId>
  <artifactId>scala-library</artifactId>
  <version>${scala.version}</version>
</dependency>
<dependency>
  <groupId>org.scala-lang</groupId>
  <artifactId>scala-actors</artifactId>
  <version>${scala.version}</version>
</dependency>
<dependency>
  <groupId>org.scala-lang</groupId>
  <artifactId>scalap</artifactId>
  <version>${scala.version}</version>
</dependency>



Adding these has $ mahout spark-shell working for me on Spark 1.2.1.
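For what it's worth, a quick way to double-check that every module now resolves a single Scala version is to filter the dependency tree. The module names below are just the ones mentioned in this thread, and the invocation is plain Maven rather than anything Mahout-specific:

    # List the org.scala-lang artifacts each module pulls in; they should all
    # report the same ${scala.version}.
    mvn -pl math-scala,spark -am dependency:tree -Dincludes=org.scala-lang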



On 02/24/2015 04:20 PM, Andrew Palumbo wrote:
> adding in the following dependencies to the math-scala pom.xml seems 
> to fix the problem for me on the old master before the revert/1.2 branch:
>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-reflect</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-library</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scala-actors</artifactId>
>   <version>${scala.version}</version>
> </dependency>
> <dependency>
>   <groupId>org.scala-lang</groupId>
>   <artifactId>scalap</artifactId>
>   <version>${scala.version}</version>
> </dependency>
>
>
> I don't know if this will help with item-similarity, and it doesn't seem to
> have anything to do with the crash that Andrew M. is experiencing on
> the reverted spark-1.1.0 master.
>
>
>
> On 02/24/2015 03:59 PM, Andrew Musselman wrote:
>> Except after some time it blew up:
>>
>> $ bin/mahout spark-shell
>> SLF4J: Class path contains multiple SLF4J bindings.
>> SLF4J: Found binding in
>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>
>> SLF4J: Found binding in
>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>
>> SLF4J: Found binding in
>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>
>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>> explanation.
>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>
>>                           _                 _
>>           _ __ ___   __ _| |__   ___  _   _| |_
>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>          | | | | | | (_| | | | | (_) | |_| | |_
>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>
>>
>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>> 1.7.0_03)
>> Type in expressions to have them evaluated.
>> Type :help for more information.
>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a 
>> loopback
>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>> another address
>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>> library for your platform... using builtin-java classes where applicable
>> Created spark context..
>> Mahout distributed context is available as "implicit val sdc".
>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId; local class incompatible: 
>> stream
>> classdesc serialVersionUID = 1677335532749418220, local class
>> serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:24:50 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:26:00 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:26:33 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:27:43 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:28:16 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:29:26 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:29:59 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:31:09 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:31:42 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at 
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at 
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at 
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>
>>      at 
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at 
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at 
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>
>>      at 
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>
>>      at 
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>
>>      at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>
>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>      at scala.util.Try$.apply(Try.scala:161)
>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:33:24 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:34:34 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:35:07 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:36:17 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:36:50 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:38:00 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:38:33 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:39:43 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:40:16 ERROR Remoting: 
>> org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>> killed. Reason: Master removed our application: FAILED
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>> cluster scheduler: Master removed our application: FAILED
>>
>>
>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>>> Bingo, works off master now; thanks Dmitriy.
>>>
>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>> wrote:
>>>
>>>> ok spark 1.2 is mirrored now.
>>>> and master should be also ok (back to 1.1)
>>>>
>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>
>>>>> I got email confirmation like so:
>>>>> Repository: mahout
>>>>> Updated Branches:
>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov 
>>>>> <dl...@gmail.com>
>>>>> wrote:
>>>>>
>>>>>> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
>>>>>> should be there eventually, give it a bit of time.
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> what exactly did you try to do?
>>>>>>>
>>>>>>> just resetting HEAD will not work on remote branch -- you need
>>>>>>> force-sync that (with +) since it is a history-rewriting push, but asf
>>>>>>> git does not allow that.
>>>>>>>
>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before. so if
>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
>>>>>>>> see it there yet.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the
>>>>>>>>>> 1.2.1
>>>>>>>>> As i just explained, that resets are not possible with ASF git.
>>>>>>>>> Reverting is the only option.
>>>>>>>>>
>>>>>>>>> -d
>>>>>>>>>
>>>>>>>
>>>
>
>
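
For anyone skimming the git discussion above: the difference is between a
history-rewriting push, which the ASF repo rejects, and an additive one, which
it accepts. A rough sketch (commit ids are placeholders; spark-1.2 is the
branch created above):

# rejected by ASF git: rewriting the remote branch after a hard reset
$ git reset --hard <previous-commit>
$ git push origin +master

# accepted: add history instead of rewriting it
$ git revert <bad-commit>                      # new commit undoing the change
$ git push origin master
$ git checkout -b spark-1.2 <previous-commit>  # or cut a new branch at the old commit
$ git push origin spark-1.2                    # mirrored to GitHub eventually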


Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Adding the following dependencies to the math-scala pom.xml seems to fix the
problem for me on the old master (before the revert/1.2 branch):

<dependency>
   <groupId>org.scala-lang</groupId>
   <artifactId>scala-reflect</artifactId>
   <version>${scala.version}</version>
</dependency>
<dependency>
   <groupId>org.scala-lang</groupId>
   <artifactId>scala-library</artifactId>
   <version>${scala.version}</version>
</dependency>
<dependency>
   <groupId>org.scala-lang</groupId>
   <artifactId>scala-actors</artifactId>
   <version>${scala.version}</version>
</dependency>
<dependency>
   <groupId>org.scala-lang</groupId>
   <artifactId>scalap</artifactId>
   <version>${scala.version}</version>
</dependency>
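
A quick sanity check after adding these, for anyone trying it: make sure Maven
resolves exactly one Scala version for the module. Something along these lines
should show it (the module id "math-scala" and the ${scala.version} property
are assumptions based on the pom above; adjust to the actual artifactId):

$ mvn -pl math-scala dependency:tree -Dincludes=org.scala-lang
$ mvn -pl math-scala help:evaluate -Dexpression=scala.version

If more than one 2.10.x version of scala-library or scala-reflect shows up in
the tree, that mismatch is the first thing to fix.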


I don't know if this will help with item-similarity, and it doesn't seem to
have anything to do with the crash that Andrew M. is experiencing on the
reverted spark-1.1.0 master.



On 02/24/2015 03:59 PM, Andrew Musselman wrote:
> Except after some time it blew up:
>
> $ bin/mahout spark-shell
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.
> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>
>                           _                 _
>           _ __ ___   __ _| |__   ___  _   _| |_
>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>          | | | | | | (_| | | | | (_) | |_| | |_
>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>
>
> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> 1.7.0_03)
> Type in expressions to have them evaluated.
> Type :help for more information.
> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> another address
> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
> library for your platform... using builtin-java classes where applicable
> Created spark context..
> Mahout distributed context is available as "implicit val sdc".
> mahout> 15/02/24 12:24:17 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> classdesc serialVersionUID = 1677335532749418220, local class
> serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> removed): remote Akka client disassociated
> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> removed): remote Akka client disassociated
> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> removed): remote Akka client disassociated
> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> removed): remote Akka client disassociated
> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
> removed): remote Akka client disassociated
> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
> removed): remote Akka client disassociated
> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
> removed): remote Akka client disassociated
> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
> removed): remote Akka client disassociated
> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
> removed): remote Akka client disassociated
> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>      at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>      at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>      at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>      at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>      at scala.util.Try$.apply(Try.scala:161)
>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>      at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>      at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>      at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>      at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>      at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>      at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>      at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
> removed): remote Akka client disassociated
> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
> killed. Reason: Master removed our application: FAILED
> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
> cluster scheduler: Master removed our application: FAILED
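
The repeated java.io.InvalidClassException on org.apache.spark.storage.BlockManagerId
with two different serialVersionUID values is the classic symptom of the driver and
the executors running different Spark builds (here a shell built against
spark-assembly-1.2.1 talking to a cluster still running an older assembly). A rough
way to confirm which assembly each side is using, assuming a standalone cluster laid
out like the paths in the log above (adjust paths to your install):

# what the shell puts on its classpath
$ ls $SPARK_HOME/assembly/target/scala-2.10/
# what the running master/worker daemons were started from
$ ps aux | grep spark-assembly
# the serialVersionUID baked into a given assembly (JDK serialver tool)
$ serialver -classpath $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-1.2.1-hadoop1.1.1.jar \
    org.apache.spark.storage.BlockManagerId

If the two sides report different UIDs, rebuilding one of them so the versions match
(and restarting the daemons) makes these errors go away.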
>
>
> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
>> Bingo, works off master now; thanks Dmitriy.
>>
>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> wrote:
>>
>>> ok spark 1.2 is mirrored now.
>>> and master should be also ok (back to 1.1)
>>>
>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>
>>>> I got email confirmation like so:
>>>> Repository: mahout
>>>> Updated Branches:
>>>>    refs/heads/spark-1.2 [created] 901ef03b4
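
A sketch of that kind of branch publication (the commit id is the one from the
confirmation above; this is not necessarily the exact sequence that was run):

$ git checkout -b spark-1.2 901ef03b4   # local branch pointing at 901ef03b4
$ git push origin spark-1.2             # a new ref is a fast-forward push, so ASF git accepts it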
>>>>
>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>>
>>>>> Yeah, OK, so you pushed the 1.2 branch to ASF but it is not yet in GitHub.
>>>>> It should be there eventually; give it a bit of time.
>>>>>
>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>>>>> wrote:
>>>>>
>>>>>> What exactly did you try to do?
>>>>>> 
>>>>>> Just resetting HEAD will not work on a remote branch -- you need to
>>>>>> force-push that (with +) since it is a history-rewriting push, but ASF git
>>>>>> does not allow that.
>>>>>> 
>>>>>> ASF will mirror ALL branches AFAIK. I think I've done it before, so if you
>>>>>> create a new one it should (eventually) get there.
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and don't
>>>>>>> see it there yet.
>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>> wrote:
>>>>>>>> 
>>>>>>>>> to be safe I'd "git reset --hard xyz" to the commit previous to the 1.2.1
>>>>>>>> 
>>>>>>>> As I just explained, resets are not possible with ASF git. Reverting is
>>>>>>>> the only option.
>>>>>>>>
>>>>>>>> -d
>>>>>>>>
>>>>>>
>>
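
To summarize the git side of this exchange: once a commit has been pushed to the
ASF repository, git reset --hard followed by a forced push would rewrite published
history, and the ASF remote rejects non-fast-forward pushes, so the workable options
are a new branch (as above) or a revert commit. Roughly, with <bad-commit> as a
placeholder:

# a history-rewriting push needs force (the leading +) and is refused by ASF git
$ git push origin +master
# the accepted alternative: add a commit that undoes the bad one, then push normally
$ git revert <bad-commit>
$ git push origin master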


Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
@ap, I think that fixes the 1.2.1 build for me too. Going to test on my cluster and if jobs + shell work ok I’ll push it to the master.


On Feb 24, 2015, at 2:59 PM, Andrew Palumbo <ap...@outlook.com> wrote:

OK, finally pushed it to the spark-1.2 branch. The shell on 1.2 should be working now also.

On 02/24/2015 05:45 PM, Andrew Musselman wrote:
> Using Spark v1.1.0 cleared up those errors.
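
In other words, the errors disappear once the Mahout build and the running cluster
agree on the Spark version: either build Mahout against the 1.1.x the cluster is
running, or move the whole cluster to the 1.2.1 assembly. A sketch of the first
option, assuming the Mahout parent pom exposes a spark.version property (check the
pom for the exact property name before relying on it):

$ cd ~/mahout
$ mvn clean install -DskipTests -Dspark.version=1.1.0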
> 
> On Tue, Feb 24, 2015 at 2:42 PM, Andrew Palumbo <ap...@outlook.com> wrote:
> 
>> Yeah - I was working off of an old branch and couldn't figure out how to
>> get the mahout/spark-1.2 branch. I got it; just double-checking everything
>> and will push it shortly.
>> 
>> 
>> On 02/24/2015 05:32 PM, Dmitriy Lyubimov wrote:
>> 
>>> All I am saying is that if the Spark-side tests work, IMO there's no need to
>>> go through the PR process; you can push your squashed patch directly. But if
>>> you feel like doing a PR, please go ahead.
>>> 
>>> On Tue, Feb 24, 2015 at 2:30 PM, Andrew Palumbo <ap...@outlook.com>
>>> wrote:
>>> 
>>>> Or rather, I have git@github.com:andrewpalumbo/mahout.git as my origin
>>>> and there's no spark-1.2 branch there.
>>>> 
>>>> On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
>>>> 
>>>>> Sorry - trying to get the shell fix out but having trouble pulling the
>>>>> remote spark-1.2 branch. I think it's because I have git.apache.org as my
>>>>> remote rather than github.com/apache/mahout.
>>>>> 
>>>>> 
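
For anyone in the same spot, fetching the branch straight from the ASF repo is
roughly the following (the read-only git.apache.org URL mentioned above is an
assumption about the exact remote; substitute whichever remote actually carries
the branch):

$ git remote add apache git://git.apache.org/mahout.git   # read-only ASF mirror
$ git fetch apache
$ git checkout -b spark-1.2 apache/spark-1.2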
>>>>> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>>>>> 
>>>>>  Makes sense; I'm still getting those errors after restarting my rebuilt
>>>>>> spark..
>>>>>> 
>>>>>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>> 
>>>>>>> IIRC MAHOUT_LOCAL doesn't mean a thing in spark mode; it is purely an MR
>>>>>>> thing.
>>>>>>> 
>>>>>>> With the shell, the important things are MASTER (a Spark setting) and the
>>>>>>> other Spark-specific settings that should be added in MAHOUT_OPTS (but the
>>>>>>> latter is broken in the public branch with the spark shell -- one of the
>>>>>>> bugs that needs fixing).
>>>>>>> 
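
So for running the shell against a cluster, the relevant knobs are the Spark-side
ones rather than MAHOUT_LOCAL; something along these lines (the master URL and
paths are just examples):

$ unset MAHOUT_LOCAL                            # MAHOUT_LOCAL only affects the MapReduce code paths
$ export SPARK_HOME=/home/akm/spark
$ export MASTER=spark://your-master-host:7077   # or local[*] for a purely local shell
$ bin/mahout spark-shell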
>>>>>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>> 
>>>>>>>   Ah, I had an old build of spark server running.
>>>>>>> 
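
(If the standalone daemons were started from an older build, they keep serving the
old classes until restarted; after rebuilding Spark, bouncing them is roughly:)

$ cd $SPARK_HOME
$ sbin/stop-all.sh    # stop the master and workers started from the old build
$ sbin/start-all.sh   # start them again from the freshly built assembly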
>>>>>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>> 
>>>>>>>>> I only have one spark build that I know of, and it's still reporting
>>>>>>>>> errors in Mahout local mode.
>>>>>>>>> 
>>>>>>>>> $ echo $SPARK_HOME
>>>>>>>>> /home/akm/spark
>>>>>>>>> 
>>>>>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>>>>>> $ bin/mahout spark-shell
>>>>>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>>>>>> explanation.
>>>>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>>>> 
>>>>>>>>>                          _                 _
>>>>>>>>>          _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>>>>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>>>>         | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>>>>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>>>> 
>>>>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>>>>>> 1.7.0_03)
>>>>>>>>> Type in expressions to have them evaluated.
>>>>>>>>> Type :help for more information.
>>>>>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>>>>>> another address
>>>>>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>>>>>> library for your platform... using builtin-java classes where applicable
>>>>>>>>> Created spark context..
>>>>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> [the identical java.io/akka deserialization stack trace quoted at the top
>>>>>>>>> of this thread follows each of these errors and is elided here]
>>>>>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:46:57 ERROR Remoting:
>>>>>>>>> 
>>>>>>>>>  org.apache.spark.storage.BlockManagerId;
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:47:30 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>> 
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2
>>>>>>>> (already
>>>>>>>> 
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:48:40 ERROR Remoting:
>>>>>>>>> 
>>>>>>>>>  org.apache.spark.storage.BlockManagerId;
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:49:14 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>> 
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3
>>>>>>>> (already
>>>>>>>> 
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:50:23 ERROR Remoting:
>>>>>>>>> 
>>>>>>>>>  org.apache.spark.storage.BlockManagerId;
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:50:56 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>> 
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4
>>>>>>>> (already
>>>>>>>> 
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:52:07 ERROR Remoting:
>>>>>>>>> 
>>>>>>>>>  org.apache.spark.storage.BlockManagerId;
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>> 
>>>>>>>>> java:369)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>> 
>>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>> 
>>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>> 
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>> 
>>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>> 
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>> 
>>>>>>>  15/02/24 13:52:40 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>> 
>>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>> 
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>> 
>>>>>>>>>  -7366074099953117729
>>>>>>>>       at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>> 
>>>>>>>>>       at
>>>>>>>>> 
>>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>> 
>>>>>>>>        at
>>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>> 
>>>>>>>        at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at
>>>>>>>> 
>>>>>>>> 
>>>>>>>> 



Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
OK, finally pushed it to the spark-1.2 branch. The shell on 1.2 should be 
working now also.
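
A minimal sketch of pulling that branch when origin points at a personal fork, assuming
the branch is named spark-1.2 on github.com/apache/mahout as described in the quoted
messages below (the remote name "apache" is just a placeholder):

$ git remote add apache https://github.com/apache/mahout.git
$ git fetch apache
$ git checkout -b spark-1.2 apache/spark-1.2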

On 02/24/2015 05:45 PM, Andrew Musselman wrote:
> Using Spark v1.1.0 cleared up those errors.
>
> On Tue, Feb 24, 2015 at 2:42 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>
>> Yeah - I was working off of an old branch and couldn't figure out how to
>> get the mahout/spark-1.2 branch. I got it; just double-checking everything
>> and will push it shortly.
>>
>>
>> On 02/24/2015 05:32 PM, Dmitriy Lyubimov wrote:
>>
>>> All I am saying is that if the Spark-side tests work, IMO there's no need to
>>> go through the PR process; you can push your squashed patch directly. But if
>>> you feel like doing a PR, please go ahead.
>>>
>>> On Tue, Feb 24, 2015 at 2:30 PM, Andrew Palumbo <ap...@outlook.com>
>>> wrote:
>>>
>>>> Or rather, I have git@github.com:andrewpalumbo/mahout.git as my origin
>>>> and there's no spark-1.2 branch there.
>>>>
>>>> On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
>>>>
>>>>> Sorry - trying to get the shell fix out but having trouble pulling the
>>>>> remote spark-1.2 branch. I think it's because I have git.apache.org as my
>>>>> remote rather than github.com/apache/mahout.
>>>>>
>>>>>
>>>>> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>>>>>
>>>>>> Makes sense; I'm still getting those errors after restarting my rebuilt
>>>>>> Spark.
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> IIRC MAHOUT_LOCAL doesn't mean a thing with spark mode. It is purely an
>>>>>>> MR thing.
>>>>>>>
>>>>>>> With the shell, the important things are MASTER (a Spark setting) and also
>>>>>>> other Spark-specific settings that should be added in MAHOUT_OPT (but the
>>>>>>> latter is broken in the public branch with the spark shell -- one of the
>>>>>>> bugs that need fixing).
>>>>>>>
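A minimal sketch of what that looks like in practice, assuming the shell honors the
MASTER environment variable as described above; the master URL is illustrative only,
and the Spark-specific options mentioned would go through the MAHOUT_OPT route that is
reported broken above:

$ export MASTER=spark://localhost:7077   # illustrative standalone master URL
$ bin/mahout spark-shell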
>>>>>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>    Ah, I had an old build of spark server running.
>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>
>>>>>>>>> I only have one spark build that I know of, and it's still reporting
>>>>>>>>> errors in Mahout local mode.
>>>>>>>>>
>>>>>>>>> $ echo $SPARK_HOME
>>>>>>>>> /home/akm/spark
>>>>>>>>>
>>>>>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>>>>>> $ bin/mahout spark-shell
>>>>>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: Found binding in
>>>>>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>>>>
>>>>>>>>>                             _                 _
>>>>>>>>>             _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>>>>            | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>>>>            | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>>>>            |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>>>>
>>>>>>>>>
>>>>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>>>>>> 1.7.0_03)
>>>>>>>>> Type in expressions to have them evaluated.
>>>>>>>>> Type :help for more information.
>>>>>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
>>>>>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
>>>>>>>>> Created spark context..
>>>>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>>>       at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>>>       at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>>       at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>       at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>>>       at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>>>       at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>       at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [same message and stack trace as above]
>>>>>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [same message and stack trace as above]
>>>>>>>>> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [same message and stack trace as above]
>>>>>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [same message and stack trace as above]
>>>>>>>>> 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [same message and stack trace as above]
>>>>>>>   15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2
>>>>>>>> (already
>>>>>>>>
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:48:40 ERROR Remoting:
>>>>>>>>>
>>>>>>>>>   org.apache.spark.storage.BlockManagerId;
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>>
>>>>>>>>> java:369)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>>
>>>>>>>         at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>>
>>>>>>>         at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>        at akka.remote.DefaultMessageDispatcher.
>>>>>>>>
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>>
>>>>>>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>>
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>>
>>>>>>>   15/02/24 13:49:14 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>>
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>>
>>>>>>>>> java:369)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>>
>>>>>>>         at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>>
>>>>>>>         at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>        at akka.remote.DefaultMessageDispatcher.
>>>>>>>>
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>>
>>>>>>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>>
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>>
>>>>>>>   15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3
>>>>>>>> (already
>>>>>>>>
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:50:23 ERROR Remoting:
>>>>>>>>>
>>>>>>>>>   org.apache.spark.storage.BlockManagerId;
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>>
>>>>>>>>> java:369)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>>
>>>>>>>         at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>>
>>>>>>>         at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>        at akka.remote.DefaultMessageDispatcher.
>>>>>>>>
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>>
>>>>>>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>>
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>>
>>>>>>>   15/02/24 13:50:56 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>>
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>>
>>>>>>>>> java:369)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>>
>>>>>>>         at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>>
>>>>>>>         at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>        at akka.remote.DefaultMessageDispatcher.
>>>>>>>>
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>>
>>>>>>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>>
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>>
>>>>>>>   15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4
>>>>>>>> (already
>>>>>>>>
>>>>>>>>> removed): remote Akka client disassociated
>>>>>>>>> 15/02/24 13:52:07 ERROR Remoting:
>>>>>>>>>
>>>>>>>>>   org.apache.spark.storage.BlockManagerId;
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.defaultReadFields(
>>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>>
>>>>>>>>> java:369)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>>
>>>>>>>         at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>>> scala:57)
>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>>    akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>>> Serialization.scala:104)
>>>>>>>>>
>>>>>>>         at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   akka.serialization.Serialization.deserialize(
>>>>>>>> Serialization.scala:98)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.DefaultMessageDispatcher.
>>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>        at akka.remote.DefaultMessageDispatcher.
>>>>>>>>
>>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>>> Endpoint.scala:764)
>>>>>>>>>
>>>>>>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>>        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>>        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>>        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>    akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>>
>>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>>
>>>>>>>         at
>>>>>>>>>    scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>>
>>>>>>>   15/02/24 13:52:40 ERROR Remoting:
>>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>>
>>>>>>>>   local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>>
>>>>>>>>> BlockManagerId;
>>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>>
>>>>>>>>>   -7366074099953117729
>>>>>>>>        at
>>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>>
>>>>>>>>>        at
>>>>>>>>>
>>>>>>>>>   java.io.ObjectInputStream.readClassDesc(
>>>>>>>> ObjectInputStream.java:1514)
>>>>>>>>
>>>>>>>>         at
>>>>>>>>>    java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>>
>>>>>>>         at
>>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>        at
>>>>>>>>
>>>>>>>>
>>>>>>>>


Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Using Spark v1.1.0 cleared up those errors.
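
The InvalidClassException on org.apache.spark.storage.BlockManagerId below
(stream classdesc serialVersionUID 1677335532749418220 vs. local
-7366074099953117729) is the usual symptom of the shell's Spark jars and the
running master/workers coming from two different Spark builds. A minimal
sketch of the recovery, assuming a standalone Spark at $SPARK_HOME and the
Mahout sources at $MAHOUT_HOME (paths and exact commands are illustrative):

# Restart the standalone daemons from the same Spark build the shell uses, so
# both sides serialize BlockManagerId from the same class version.
$ $SPARK_HOME/sbin/stop-all.sh
$ $SPARK_HOME/sbin/start-all.sh

# Rebuild Mahout against that Spark so the assembly jar on the classpath matches.
$ cd $MAHOUT_HOME && mvn clean install -DskipTests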

On Tue, Feb 24, 2015 at 2:42 PM, Andrew Palumbo <ap...@outlook.com> wrote:

> Yeah - I was working off of an old branch and couldn't figure out how to
> get the mahout/spark-1.2 branch. I've got it now; just double-checking
> everything and will push it shortly.
>
>
> On 02/24/2015 05:32 PM, Dmitriy Lyubimov wrote:
>
>> All I am saying is that if the Spark-side tests pass, IMO there's no need to
>> go through the PR process; you can push your squashed patch directly. But if
>> you'd rather do a PR, please go ahead.
>>
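For reference, "push your squashed patch directly" would look roughly like
this, assuming committer access to the Apache remote; the branch name, remote
name, and commit message are illustrative:

# Squash the local fix onto master as a single commit and push it straight up.
$ git checkout master
$ git merge --squash spark-shell-fix
$ git commit -m "Fix spark-shell initialization"
$ git push apache master
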
>> On Tue, Feb 24, 2015 at 2:30 PM, Andrew Palumbo <ap...@outlook.com>
>> wrote:
>>
>>> Or rather, I have git@github.com:andrewpalumbo/mahout.git as my origin,
>>> and there's no spark-1.2 branch there.
>>>
>>> On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
>>>
>>>> Sorry - trying to get the shell fix out, but I'm having trouble pulling the
>>>> remote spark-1.2 branch. I think it's because I have git.apache.org as my
>>>> remote rather than github.com/apache/mahout.
>>>>
>>>>
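One way around that, assuming the branch lives on the GitHub mirror (the
remote name below is illustrative):

# Add the GitHub mirror as a second remote, then fetch and track the branch.
$ git remote add apache-github https://github.com/apache/mahout.git
$ git fetch apache-github
$ git checkout -b spark-1.2 apache-github/spark-1.2
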
>>>> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>>>>
>>>>> Makes sense; I'm still getting those errors after restarting my rebuilt
>>>>> Spark.
>>>>>
>>>>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>>>> wrote:
>>>>>
>>>>>> IIRC MAHOUT_LOCAL doesn't mean a thing in Spark mode; it is purely an MR
>>>>>> thing.
>>>>>>
>>>>>> With the shell, the important things are MASTER (a Spark setting) and the
>>>>>> other Spark-specific settings that should be added in MAHOUT_OPTS (but the
>>>>>> latter is broken in the public branch with the spark-shell -- one of the
>>>>>> bugs that needs fixing).
>>>>>>
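
In practice that means launching the shell with something like the following;
the master URL and the executor-memory setting below are illustrative, and per
the note above the MAHOUT_OPTS route was still broken for the spark-shell at
the time:

$ export SPARK_HOME=/home/akm/spark
$ export MASTER=spark://localhost:7077
$ export MAHOUT_OPTS="-Dspark.executor.memory=2g"
$ bin/mahout spark-shell
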
>>>>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>   Ah, I had an old build of spark server running.
>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>> I only have one spark build that I know of, and it's still reporting
>>>>>>>> errors in Mahout local mode.
>>>>>>>>
>>>>>>>> $ echo $SPARK_HOME
>>>>>>>> /home/akm/spark
>>>>>>>>
>>>>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>>>>> $ bin/mahout spark-shell
>>>>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>>>> SLF4J: Found binding in
>>>>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>> SLF4J: Found binding in
>>>>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>> SLF4J: Found binding in
>>>>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>>>
>>>>>>>>                            _                 _
>>>>>>>>            _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>>>           | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>>>           | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>>>           |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>>>
>>>>>>>>
>>>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>>>>> 1.7.0_03)
>>>>>>>> Type in expressions to have them evaluated.
>>>>>>>> Type :help for more information.
>>>>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>>>>> another address
>>>>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>>>>> library for your platform... using builtin-java classes where applicable
>>>>>>>> Created spark context..
>>>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>>       at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>>       at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>>       at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>>>>>       at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>>       at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>>       at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>       at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>>>       at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>>>>
>>>>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
>>>>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
>>>>>>>> 15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>>>>>> 15/02/24 13:48:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:49:14 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; [... same stack trace as above ...]
>>>>>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>>>>>> 15/02/24 13:50:23 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>
>>>>>>>> java:369)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>> scala:57)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>       at
>>>>>>>
>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>> Serialization.scala:104)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>> Serialization.scala:98)
>>>>>>>
>>>>>>>        at
>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>
>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>> Endpoint.scala:764)
>>>>>>>>
>>>>>>>
>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>
>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>
>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>
>>>>>>>
>>>>>>  15/02/24 13:50:56 ERROR Remoting:
>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>
>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>
>>>>>>>> BlockManagerId;
>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>>       at
>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>       at
>>>>>>>
>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>> ObjectInputStream.java:1514)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at
>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>
>>>>>>>> java:369)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>> scala:57)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>       at
>>>>>>>
>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>> Serialization.scala:104)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>> Serialization.scala:98)
>>>>>>>
>>>>>>>        at
>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>
>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>> Endpoint.scala:764)
>>>>>>>>
>>>>>>>
>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>
>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>
>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>
>>>>>>>
>>>>>>  15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4
>>>>>>> (already
>>>>>>>
>>>>>>>> removed): remote Akka client disassociated
>>>>>>>> 15/02/24 13:52:07 ERROR Remoting:
>>>>>>>>
>>>>>>>>  org.apache.spark.storage.BlockManagerId;
>>>>>>>
>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>
>>>>>>>> BlockManagerId;
>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>>       at
>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>       at
>>>>>>>
>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>> ObjectInputStream.java:1514)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at
>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.defaultReadFields(
>>>>>>>> ObjectInputStream.java:1964)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>> java.io.ObjectInputStream.readSerialData(
>>>>>>>> ObjectInputStream.java:1888)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>> ObjectInputStream.java:1771)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>>>>
>>>>>>>> java:369)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.serialization.JavaSerializer$$anonfun$1.
>>>>>>>> apply(Serializer.scala:136)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.DynamicVariable.withValue(DynamicVariable.
>>>>>>> scala:57)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>       at
>>>>>>>
>>>>>>>>   akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>>>>>>> Serialization.scala:104)
>>>>>>>>
>>>>>>>
>>>>>>        at scala.util.Try$.apply(Try.scala:161)
>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  akka.serialization.Serialization.deserialize(
>>>>>>> Serialization.scala:98)
>>>>>>>
>>>>>>>        at
>>>>>>>> akka.remote.MessageSerializer$.deserialize(
>>>>>>>> MessageSerializer.scala:23)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.DefaultMessageDispatcher.
>>>>>>>> payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.
>>>>>>>
>>>>>>>> dispatch(Endpoint.scala:73)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>>>>>>> Endpoint.scala:764)
>>>>>>>>
>>>>>>>
>>>>>>        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>
>>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>>       at
>>>>>>>>
>>>>>>>>   akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>>>>>>> AbstractDispatcher.scala:386)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>>>>>>>
>>>>>>> runTask(ForkJoinPool.java:1339)
>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>>>>>>> ForkJoinPool.java:1979)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>>
>>>>>>>>   scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>>>>>>> ForkJoinWorkerThread.java:107)
>>>>>>>>
>>>>>>>
>>>>>>  15/02/24 13:52:40 ERROR Remoting:
>>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>>
>>>>>>>  local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.
>>>>>>>
>>>>>>>> BlockManagerId;
>>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>>>>
>>>>>>>>  -7366074099953117729
>>>>>>>       at
>>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>       at
>>>>>>>
>>>>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>>>>> ObjectInputStream.java:1601)
>>>>>>>>
>>>>>>>>       at
>>>>>>>>
>>>>>>>>  java.io.ObjectInputStream.readClassDesc(
>>>>>>> ObjectInputStream.java:1514)
>>>>>>>
>>>>>>>        at
>>>>>>>>
>>>>>>>>   java.io.ObjectInputStream.readOrdinaryObject(
>>>>>>>> ObjectInputStream.java:1750)
>>>>>>>>
>>>>>>>
>>>>>>        at
>>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at
>>>>>>>
>>>>>>>
>>>>>>>
>

Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Yeah - I was working off of an old branch and couldn't figure out how to
get the mahout/spark-1.2 branch. I got it now; just double-checking
everything and will push it shortly.
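
One way to pull a branch that only exists on the canonical repo is to add
github.com/apache/mahout as a second remote and check the branch out from
there; the remote name "apache" below is just a placeholder:

$ git remote add apache https://github.com/apache/mahout.git
$ git fetch apache
$ git checkout -b spark-1.2 apache/spark-1.2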

On 02/24/2015 05:32 PM, Dmitriy Lyubimov wrote:
> All I am saying is that if the Spark-side tests work, IMO there's no need to
> go through the PR process; you can push your squashed patch directly, but if
> you feel like doing a PR, please go ahead.
>
> On Tue, Feb 24, 2015 at 2:30 PM, Andrew Palumbo <ap...@outlook.com> wrote:
>
>> Or rather, I have git@github.com:andrewpalumbo/mahout.git as my origin, and
>> there's no spark-1.2 branch there.
>>
>> On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
>>
>>> Sorry - trying to get the shell fix out, but having trouble pulling the
>>> remote spark-1.2 branch. I think it's because I have git.apache.org as my
>>> remote rather than github.com/apache/mahout.
>>>
>>>
>>> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>>>
>>>> Makes sense; I'm still getting those errors after restarting my rebuilt
>>>> spark..
>>>>
>>>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>>
>>>>> IIRC MAHOUT_LOCAL doesn't mean a thing in spark mode; it is purely an MR
>>>>> thing.
>>>>>
>>>>> With the shell, the important things are MASTER (a Spark setting) and also
>>>>> other Spark-specific settings that should be added in MAHOUT_OPT (but the
>>>>> latter is broken in the public branch with the spark shell -- one of the
>>>>> bugs that need fixing).
>>>>>
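
For example, a minimal way to point the shell at a cluster is to set MASTER
(rather than MAHOUT_LOCAL) before launching; the master URL below is a
placeholder for whatever the rebuilt Spark cluster actually advertises:

$ export SPARK_HOME=/home/akm/spark
$ export MASTER=spark://localhost:7077
$ bin/mahout spark-shell
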
>>>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>   Ah, I had an old build of spark server running.
>>>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>   I only have one spark build that I know of, and it's still reporting
>>>>>>> errors in Mahout local mode.
>>>>>>>
>>>>>>> $ echo $SPARK_HOME
>>>>>>> /home/akm/spark
>>>>>>>
>>>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>>>> $ bin/mahout spark-shell
>>>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>>
>>>>>>>                            _                 _
>>>>>>>            _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>>           | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>>           | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>>           |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>>
>>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_03)
>>>>>>> Type in expressions to have them evaluated.
>>>>>>> Type :help for more information.
>>>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
>>>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
>>>>>>> Created spark context..
>>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>       at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>       at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>       at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>>>>       at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>       at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>       at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>       at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>>       at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 13:48:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:49:14 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 13:50:23 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:50:56 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 13:52:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above]
>>>>>>> 15/02/24 13:52:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId [same InvalidClassException and stack trace as above; the quoted output is truncated here]


Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
All I am saying is that if the Spark-side tests pass, IMO there's no need to
go through the PR process; you can push your squashed patch directly. But if
you'd rather do a PR, please go ahead.
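
A minimal sketch of the "squash and push directly" route described above; the
spark-1.2 branch name comes from later in this thread, and the remote name
"apache" is only a placeholder for however the canonical repo is configured
locally:

  # fold the fix branch's work-in-progress commits into a single commit
  git checkout spark-1.2
  git rebase -i master        # mark all but the first commit as "squash"
  # push the squashed branch to the canonical repo ("apache" is a placeholder)
  git push apache spark-1.2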

On Tue, Feb 24, 2015 at 2:30 PM, Andrew Palumbo <ap...@outlook.com> wrote:

> or rather i have git@github.com:andrewpalumbo/mahout.git as my origin and
> there's no spark-1.2 branch there
>
> On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
>
>> sorry- trying to get the shell fix out but having trouble pulling the
>> remote spark 1.2 branch. i think its cause i have git.apache.org as my
>> remote rather than github.com/apache/mahout
>>
>>
>> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>>
>>> Makes sense; I'm still getting those errors after restarting my rebuilt
>>> spark..
>>>
>>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>> wrote:
>>>
>>>  IIRC MAHOUT_LOCAL doesn't mean a thing with spark mode. It is purely MR
>>>> thing.
>>>>
>>>> with shell, important things are MASTER (spark setting) and also other
>>>> spark specific settings  that should be added in MAHOUT_OPT (but the
>>>> latter
>>>> is broken in public branch with spark shell -- one of the bugs that need
>>>> fixing).
>>>>
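
To make the quoted advice concrete, a minimal sketch of launching the shell
against a standalone cluster; the master URL below is a placeholder, and the
SPARK_HOME path matches the one echoed in the quoted session further down:

  export SPARK_HOME=/home/akm/spark
  export MASTER=spark://localhost:7077   # placeholder standalone master URL
  bin/mahout spark-shell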
>>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>  Ah, I had an old build of spark server running.
>>>>>
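
The serialVersionUID mismatch on org.apache.spark.storage.BlockManagerId in
the log below is what you typically see when the driver and a still-running
master/worker were built from different Spark versions -- which fits the "old
build of spark server running" diagnosis above (note the spark-assembly-1.1.1
jar on the classpath in the quoted session, versus the freshly rebuilt Spark
the shell is supposed to use). A rough way to check and bounce the standalone
daemons, assuming a stock standalone deployment under $SPARK_HOME:

  jps | grep -iE 'master|worker'              # any stale Master/Worker JVMs left?
  ls $SPARK_HOME/assembly/target/scala-2.10/  # which spark-assembly jar is deployed?
  $SPARK_HOME/sbin/stop-all.sh                # stop the old standalone daemons
  $SPARK_HOME/sbin/start-all.sh               # restart them from the rebuilt Spark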
>>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>  I only have one spark build that I know of, and it's still reporting
>>>>>> errors in Mahout local mode.
>>>>>>
>>>>>> $ echo $SPARK_HOME
>>>>>> /home/akm/spark
>>>>>>
>>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>>> $ bin/mahout spark-shell
>>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>> SLF4J: Found binding in
>>>>>>
>>>>>>  [jar:file:/home/akm/mahout/mrlegacy/target/mahout-
>>>> mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>
>>>>
>>>>> SLF4J: Found binding in
>>>>>>
>>>>>>  [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.
>>>> 10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>
>>>>> SLF4J: Found binding in
>>>>>>
>>>>>>  [jar:file:/home/akm/spark/assembly/target/scala-2.10/
>>>> spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>
>>>>
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>>> explanation.
>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>
>>>>>>                           _                 _
>>>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>
>>>>>>
>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>>> 1.7.0_03)
>>>>>> Type in expressions to have them evaluated.
>>>>>> Type :help for more information.
>>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a
>>>>>>
>>>>> loopback
>>>>>
>>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind
>>>>>> to
>>>>>> another address
>>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>>> library for your platform... using builtin-java classes where
>>>>>>
>>>>> applicable
>>>>
>>>>> Created spark context..
>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting:
>>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>>> serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>       at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>       at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>       at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>       at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>       at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>       at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>       at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>       at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>       at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>       at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>       at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>       at scala.util.Try$.apply(Try.scala:161)
>>>>>>       at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>       at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>       at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>       at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>       at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>       at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>       at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>       at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>       at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>       at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>       at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>       at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>       at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 13:48:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:49:14 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 13:50:23 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:50:56 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 13:52:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>>> 15/02/24 13:52:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...

Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Or rather, I have git@github.com:andrewpalumbo/mahout.git as my origin,
and there's no spark-1.2 branch there.
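
A sketch of one way to pull that branch down, assuming it lives in the
apache-hosted repo referenced in the quoted messages below (the remote name
"apache" is just a placeholder):

  # add the canonical repo as a second remote alongside the fork
  git remote add apache https://github.com/apache/mahout.git
  git fetch apache
  # create a local branch tracking the remote spark-1.2 branch
  git checkout -b spark-1.2 apache/spark-1.2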

On 02/24/2015 05:25 PM, Andrew Palumbo wrote:
> sorry- trying to get the shell fix out but having trouble pulling the 
> remote spark 1.2 branch. i think its cause i have git.apache.org as my 
> remote rather than github.com/apache/mahout
>
>
> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>> Makes sense; I'm still getting those errors after restarting my rebuilt
>> spark..
>>
>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com> 
>> wrote:
>>
>>> IIRC MAHOUT_LOCAL doesn't mean a thing with spark mode. It is purely MR
>>> thing.
>>>
>>> with shell, important things are MASTER (spark setting) and also other
>>> spark specific settings  that should be added in MAHOUT_OPT (but the 
>>> latter
>>> is broken in public branch with spark shell -- one of the bugs that 
>>> need
>>> fixing).
>>>
>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>> Ah, I had an old build of spark server running.
>>>>
>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>> I only have one spark build that I know of, and it's still reporting
>>>>> errors in Mahout local mode.
>>>>>
>>>>> $ echo $SPARK_HOME
>>>>> /home/akm/spark
>>>>>
>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>> $ bin/mahout spark-shell
>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>> SLF4J: Found binding in
>>>>>
>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>>
>>>>> SLF4J: Found binding in
>>>>>
>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>>
>>>>> SLF4J: Found binding in
>>>>>
>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
>>>
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>>> explanation.
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>
>>>>>                           _                 _
>>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>
>>>>>
>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>> 1.7.0_03)
>>>>> Type in expressions to have them evaluated.
>>>>> Type :help for more information.
>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a
>>>> loopback
>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to 
>>>>> bind to
>>>>> another address
>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>> library for your platform... using builtin-java classes where
>>> applicable
>>>>> Created spark context..
>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>> serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; ...
>>>>> 15/02/24 13:45:48 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 
>>>>> (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:46:57 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:47:30 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 
>>>>> (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:48:40 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:49:14 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 
>>>>> (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:50:23 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:50:56 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 
>>>>> (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:52:07 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:52:40 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> 15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 
>>>>> (already
>>>>> removed): remote Akka client disassociated
>>>>>
>>>>> mahout>
>>>>> mahout>
>>>>> mahout> 15/02/24 13:53:49 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible:
>>> stream
>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>> serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: 
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>      at
>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>>
>>>>>      at
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>
>>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>
>>>>>      at
>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at 
>>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at
>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>
>>>>>      at 
>>>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at
>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23) 
>>>>>
>>>>>      at
>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at 
>>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>      at
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>
>>>>>      at
>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>> On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:
>>>>>
>>>>>> Seems like different builds on the client and the backend.
>>>>>>
>>>>>> The shell uses your local Spark setup (pointed to by SPARK_HOME); make
>>>>>> sure it points to binaries identical to what the backend is running,
>>>>>> not just the same Spark version.
>>>>>>
>>>>>> The reason is that Spark is not binary-canonical with respect to a
>>>>>> release version: custom builds, JVM bytecode levels, Hadoop
>>>>>> dependencies, and so on can all make two builds of the same release
>>>>>> incompatible.
>>>>>>
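>>>>>> For example, one quick sanity check is to compare checksums of the Spark
>>>>>> assembly jar the shell picks up against the one each worker is running.
>>>>>> A rough sketch (the worker host name and remote path below are
>>>>>> placeholders, adjust to your cluster):
>>>>>>
>>>>>> # on the machine running bin/mahout spark-shell (the driver)
>>>>>> echo $SPARK_HOME
>>>>>> md5sum $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-*.jar
>>>>>>
>>>>>> # on a worker host (placeholder host and path)
>>>>>> ssh worker-1 'md5sum /opt/spark/assembly/target/scala-2.10/spark-assembly-*.jar'
>>>>>>
>>>>>> If the checksums differ, the client and backend are not running the same
>>>>>> build, which is exactly the situation that produces the serialVersionUID
>>>>>> mismatch shown above.
>>>>>>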
>>>>>> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>> Except after some time it blew up:
>>>>>>>
>>>>>>> $ bin/mahout spark-shell
>>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>>
>>>>>>>                           _                 _
>>>>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__| version 1.0
>>>>>>>
>>>>>>>
>>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_03)
>>>>>>> Type in expressions to have them evaluated.
>>>>>>> Type :help for more information.
>>>>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
>>>>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
>>>>>>> Created spark context..
>>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>>>>>>> org.apache.spark.storage.BlockManagerId; local class incompatible:
>>>>>> stream
>>>>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>>>>> serialVersionUID = -7366074099953117729
>>>>>>> java.io.InvalidClassException:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>>      at
>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>>
>>>>>>>      at
>>>>>>>
>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at
>>>>>>>
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>>
>>>>>>>      at
>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at
>>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at
>>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at
>>>>>>>
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>>>      at
>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>>>      at
>>>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>>
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>>>> 15/02/24 12:24:50 ERROR Remoting:
>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>> java.io.InvalidClassException:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>>      at
>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>>
>>>>>>>      at
>>>>>>>
>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at
>>>>>>>
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>>
>>>>>>>      at
>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at
>>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at
>>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at
>>>>>>>
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>>>      at
>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>>>      at
>>>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>>
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0
>>> (already
>>>>>>> removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:26:00 ERROR Remoting:
>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>> java.io.InvalidClassException:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>>      at
>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>>
>>>>>>>      at
>>>>>>>
>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at
>>>>>>>
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>>
>>>>>>>      at
>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at
>>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at
>>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at
>>>>>>>
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>>>      at
>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>>>      at
>>>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>>
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>>>> 15/02/24 12:26:33 ERROR Remoting:
>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>> java.io.InvalidClassException:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>>      at
>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>>
>>>>>>>      at
>>>>>>>
>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at
>>>>>>>
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>>
>>>>>>>      at
>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at
>>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at
>>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at
>>>>>>>
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>>>      at
>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>>>      at
>>>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>>
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1
>>> (already
>>>>>>> removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:27:43 ERROR Remoting:
>>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>> java.io.InvalidClassException:
>>>> org.apache.spark.storage.BlockManagerId;
>>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>>> 1677335532749418220, local class serialVersionUID =
>>>> -7366074099953117729
>>>>>>>      at
>>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964) 
>>>>
>>>>>>>      at
>>>>>>>
>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at
>>>>>>>
>>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771) 
>>>>
>>>>>>>      at
>>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at
>>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at
>>>>>>>
>>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136) 
>>>>
>>>>>>>      at
>>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at
>>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104) 
>>>
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at
>>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at
>>>>>>>
>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55) 
>>>
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at
>>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764) 
>>>
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) 
>>>
>>>>>>>      at
>>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
>>>
>>>>>>>      at
>>>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
>>>>
>>>>>>>      at
>>>>>>>
>>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
>>>
>>>>>>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
[Each ERROR Remoting line below was followed by the same java.io.InvalidClassException stack trace shown above.]
>>>>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already removed): remote Akka client disassociated
>>>>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been killed. Reason: Master removed our application: FAILED
>>>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from cluster scheduler: Master removed our application: FAILED
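
An InvalidClassException reporting two different serialVersionUID values for org.apache.spark.storage.BlockManagerId means the driver and the cluster workers are running two different Spark builds. A minimal sketch of how one might confirm that, with assumed host names and jar paths (worker1 and the locations below are placeholders, not values from this thread):

# On the driver machine: note the exact assembly jar the shell puts on the classpath.
$ ls -l $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-*.jar
# On each worker, compare the assembly jar the running daemons were started from.
$ ssh worker1 'ls -l /home/akm/spark/assembly/target/scala-2.10/spark-assembly-*.jar'

If the two sides show different versions or build times, restarting the master and workers from the same freshly built Spark tree should remove the mismatch.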
>>>>>>>
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>
>>>>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>> wrote:
>>>>>>>>
>>>>>>>>> ok spark 1.2 is mirrored now.
>>>>>>>>> and master should also be ok (back to 1.1)
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>
>>>>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>>>> I got email confirmation like so:
>>>>>>>>>> Repository: mahout
>>>>>>>>>> Updated Branches:
>>>>>>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>> wrote:
>>>>>>>>>>
>>>>>>>>>>> yeah ok so you pushed the 1.2 branch to asf but it is not yet in
>>>>>>>>>>> github. it should be there eventually, give it a bit of time.
>>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>>> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> what exactly did you try to do?
>>>>>>>>>>>>
>>>>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but
>>>>>>>>>>>> asf git does not allow that.
>>>>>>>>>>>>
>>>>>>>>>>>> ASF will mirror ALL branches afaik. I think I've done it before, so
>>>>>>>>>>>> if you create a new one it should (eventually) get there.
>>>>>>>>>>>>
>>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>>>>>>>>>>>> andrew.musselman@gmail.com> wrote:
>>>>>>>>>>>>
>>>>>>>>>>>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>>>>>>>>>>>>> don't see it there yet.
>>>>>>>>>>>>>
>>>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
>>>>>>>>>>>>> wrote:
>>>>>>>>>>>>>
>>>>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
>>>>>>>>>>>>>> wrote:
>>>>>>>>>>>>>>
>>>>>>>>>>>>>>> to be safe I’d “git reset --hard xyz” to the commit previous to the 1.2.1
>>>>>>>>>>>>>> As I just explained, resets like that are not possible with ASF git.
>>>>>>>>>>>>>> Reverting is the only option.
>>>>>>>>>>>>>>
>>>>>>>>>>>>>> -d
>>>>>>>>>>>>>>
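
For reference, a rough sketch of the two options being discussed, using placeholder commit hashes (<bad-commit>, <last-good-commit>) rather than anything from this thread:

# A hard reset of a published branch needs a forced, history-rewriting push
# (the leading + forces the ref update), which ASF git rejects:
$ git push origin +master
# What does work: revert the unwanted commit and push the new history forward,
$ git revert <bad-commit>
$ git push origin master
# or cut a branch at the last good commit and push it as a new ref:
$ git branch spark-1.2 <last-good-commit>
$ git push origin spark-1.2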


Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
I'm re-trying with Spark v1.1.0 since that is known to work.
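
A sketch of what that retry could look like end to end, assuming a standalone cluster started from the same $SPARK_HOME checkout (the tag, build command, and start/stop scripts below are stock Spark 1.x steps, not commands quoted from this thread):

$ cd $SPARK_HOME
$ git checkout v1.1.0
$ mvn -DskipTests clean package           # rebuild the assembly against 1.1.0
$ sbin/stop-all.sh && sbin/start-all.sh   # restart master and workers on the new build
$ cd ~/mahout && bin/mahout spark-shell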

On Tue, Feb 24, 2015 at 2:25 PM, Andrew Palumbo <ap...@outlook.com> wrote:

> sorry - trying to get the shell fix out but having trouble pulling the
> remote spark 1.2 branch. I think it's because I have git.apache.org as my
> remote rather than github.com/apache/mahout
>
>
> On 02/24/2015 05:15 PM, Andrew Musselman wrote:
>
>> Makes sense; I'm still getting those errors after restarting my rebuilt
>> Spark.
>>
>> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com>
>> wrote:
>>
>>> IIRC MAHOUT_LOCAL doesn't mean a thing with spark mode. It is purely an MR
>>> thing.
>>>
>>> With the shell, the important things are MASTER (a spark setting) and also
>>> other spark-specific settings that should be added in MAHOUT_OPT (but the
>>> latter is broken in the public branch with the spark shell -- one of the
>>> bugs that needs fixing).
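
A minimal sketch of that setup, with an assumed standalone master URL (spark://your-master:7077 is a placeholder) and MAHOUT_LOCAL left unset since it only matters for the MapReduce side:

$ unset MAHOUT_LOCAL
$ export SPARK_HOME=/home/akm/spark
$ export MASTER=spark://your-master:7077
$ bin/mahout spark-shell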
>>>
>>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>  Ah, I had an old build of spark server running.
>>>>
>>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>>> andrew.musselman@gmail.com> wrote:
>>>>
>>>>  I only have one spark build that I know of, and it's still reporting
>>>>> errors in Mahout local mode.
>>>>>
>>>>> $ echo $SPARK_HOME
>>>>> /home/akm/spark
>>>>>
>>>>> $ export MAHOUT_LOCAL="TRUE"
>>>>> $ bin/mahout spark-shell
>>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: Found binding in
>>>>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>
>>>>>                           _                 _
>>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>
>>>>>
>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>>> 1.7.0_03)
>>>>> Type in expressions to have them evaluated.
>>>>> Type :help for more information.
>>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
>>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>>> another address
>>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>>> library for your platform... using builtin-java classes where applicable
>>>> Created spark context..
>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>      at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>      at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>      at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>      at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>      at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>      at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>      at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>      at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>      at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>      at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>      at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>      at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>      at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>      at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:45:15 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:45:48 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:46:57 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:47:30 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:48:40 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:49:14 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:50:23 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:50:56 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>>>>> removed): remote Akka client disassociated
>>>>> 15/02/24 13:52:07 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1771)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.
>>>>> java:369)
>>>>>      at
>>>>>
>>>>>  akka.serialization.JavaSerializer$$anonfun$1.
>>> apply(Serializer.scala:136)
>>>
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>      at
>>>>>
>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>
>>>>      at
>>>>>
>>>>>  akka.serialization.Serialization$$anonfun$deserialize$1.apply(
>>> Serialization.scala:104)
>>>
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>      at
>>>>>
>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>
>>>>>      at
>>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>      at
>>>>>
>>>>>  akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.
>>> scala:55)
>>>
>>>>      at
>>>>>
>>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>
>>>>      at akka.remote.DefaultMessageDispatcher.
>>>>> dispatch(Endpoint.scala:73)
>>>>>      at
>>>>>
>>>>>  akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(
>>> Endpoint.scala:764)
>>>
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>      at
>>>>>
>>>>>  akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(
>>> AbstractDispatcher.scala:386)
>>>
>>>>      at
>>>>>
>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>
>>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.
>>> runTask(ForkJoinPool.java:1339)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinPool.runWorker(
>>> ForkJoinPool.java:1979)
>>>
>>>>      at
>>>>>
>>>>>  scala.concurrent.forkjoin.ForkJoinWorkerThread.run(
>>> ForkJoinWorkerThread.java:107)
>>>
>>>> 15/02/24 13:52:40 ERROR Remoting:
>>>>>
>>>> org.apache.spark.storage.BlockManagerId;
>>>>
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>> 1677335532749418220, local class serialVersionUID =
>>>>>
>>>> -7366074099953117729
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readNonProxyDesc(
>>>>> ObjectInputStream.java:1601)
>>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>
>>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.readOrdinaryObject(
>>> ObjectInputStream.java:1750)
>>>
>>>>      at
>>>>>
>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>
>>>>      at
>>>>>
>>>>>  java.io.ObjectInputStream.defaultReadFields(
>>> ObjectInputStream.java:1964)
>>>
>>>>      at
>>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>
>>>>

Re: Spark shell broken

Posted by Andrew Palumbo <ap...@outlook.com>.
Sorry, I'm trying to get the shell fix out but having trouble pulling the
remote spark 1.2 branch. I think it's because I have git.apache.org as my
remote rather than github.com/apache/mahout.
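
One possible workaround (a sketch only; the exact branch name here is an
assumption, not something confirmed in the thread) is to add the GitHub
mirror as a second remote and fetch the branch from there:

$ git remote add github https://github.com/apache/mahout.git
$ git fetch github
$ git checkout -b spark-1.2 github/spark-1.2   # branch name assumed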


On 02/24/2015 05:15 PM, Andrew Musselman wrote:
> Makes sense; I'm still getting those errors after restarting my rebuilt
> Spark.
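
If a rebuilt Spark still produces these errors, one thing worth ruling out --
a sketch, assuming a standard standalone deployment under $SPARK_HOME -- is a
leftover master or worker JVM from the previous build:

$ jps -l | grep -i spark          # any Master/Worker still running from the old build?
$ $SPARK_HOME/sbin/stop-all.sh    # stop the standalone daemons
$ $SPARK_HOME/sbin/start-all.sh   # bring them back up on the rebuilt assembly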
>
> On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:
>
>> IIRC MAHOUT_LOCAL doesn't mean a thing in Spark mode; it is purely an MR
>> thing.
>>
>> With the shell, the important things are MASTER (a Spark setting) and also
>> other Spark-specific settings, which should be added to MAHOUT_OPTS (but the
>> latter is broken in the public branch with the spark-shell -- one of the bugs
>> that needs fixing).
>>
>> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>>> Ah, I had an old build of spark server running.
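
A serialVersionUID mismatch like the one in the log usually means the driver
and the running daemons were built from different Spark trees. A rough sketch
of one way to check (paths assume the layout shown earlier in the thread) is
to compare the assembly on disk with the one the live processes actually
loaded:

$ ls $SPARK_HOME/assembly/target/scala-2.10/   # assembly jar the shell is built against
$ ps aux | grep spark-assembly | grep -v grep  # assembly on the running daemons' classpaths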
>>>
>>> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
>>> andrew.musselman@gmail.com> wrote:
>>>
>>>> I only have one spark build that I know of, and it's still reporting
>>>> errors in Mahout local mode.
>>>>
>>>> $ echo $SPARK_HOME
>>>> /home/akm/spark
>>>>
>>>> $ export MAHOUT_LOCAL="TRUE"
>>>> $ bin/mahout spark-shell
>>>> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>> SLF4J: Found binding in
>>>>
>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>>
>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: Found binding in
>>>>
>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>>> explanation.
>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>
>>>>                           _                 _
>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>
>>>>
>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>>>> 1.7.0_03)
>>>> Type in expressions to have them evaluated.
>>>> Type :help for more information.
>>>> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a
>>> loopback
>>>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>>>> another address
>>>> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
>>>> library for your platform... using builtin-java classes where
>> applicable
>>>> Created spark context..
>>>> Mahout distributed context is available as "implicit val sdc".
>>>> mahout> 15/02/24 13:43:32 ERROR Remoting:
>>>> org.apache.spark.storage.BlockManagerId; local class incompatible:
>> stream
>>>> classdesc serialVersionUID = 1677335532749418220, local class
>>>> serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>>      at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at
>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>>
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at
>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>>
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at
>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at
>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 13:44:05 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>>      at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at
>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>>
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at
>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>>
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at
>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at
>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 13:45:15 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>>      at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at
>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>>
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at
>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>>
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at
>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at
>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 13:45:48 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>>      at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at
>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>>
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at
>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>>
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at
>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at
>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>>>> removed): remote Akka client disassociated
>>>> 15/02/24 13:46:57 ERROR Remoting:
>>> org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>>>> local class incompatible: stream classdesc serialVersionUID =
>>>> 1677335532749418220, local class serialVersionUID =
>> -7366074099953117729
>>>>      at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>      at
>>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>      at
>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at
>>>>
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>      at
>>>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>      at
>>>>
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>      at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>      at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>      at
>>>>
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>      at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>      at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>      at
>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>      at
>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>      at
>>>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>      at
>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>      at
>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>      at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>      at
>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>      at
>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>      at
>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>      at
>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>> 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> [same stack trace as above; the identical error and trace repeated at 13:48:40, 13:49:14, 13:50:23, 13:50:56, 13:52:07 and 13:52:40]
>>>> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
>>>> 15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
>>>>
>>>> mahout>
>>>> mahout>
>>>> mahout> 15/02/24 13:53:49 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>> [same stack trace as above]
>>>> On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com>
>>>> wrote:
>>>>
>>>>> Seems like different builds on the client and the backend.
>>>>>
>>>>> The shell is using your local Spark setup (pointed to with SPARK_HOME); make sure it points to binaries identical to what is used on the backend, not just the same Spark version.
>>>>>
>>>>> The reason is that Spark is not binary-canonical w.r.t. release version: the same release can come from different custom builds, JVM bytecode levels, Hadoop dependencies, etc.
>>>>>
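A quick way to confirm that the driver and the workers are loading different
BlockManagerId bytecode is to compare the serialVersionUID computed on each
side; with mismatched jars the two numbers differ exactly as in the log above.
A minimal sketch, assuming a plain SparkContext is in scope as sc (adapt to
however the shell exposes the underlying context) and the cluster is reachable:

import java.io.ObjectStreamClass

// UID of BlockManagerId as loaded by the driver JVM (the jars the shell was launched with)
val driverUid = ObjectStreamClass.lookup(
  Class.forName("org.apache.spark.storage.BlockManagerId")).getSerialVersionUID

// UID of the same class as loaded by the executor JVMs (the workers' Spark install)
val executorUids = sc.parallelize(1 to 100).map { _ =>
  ObjectStreamClass.lookup(
    Class.forName("org.apache.spark.storage.BlockManagerId")).getSerialVersionUID
}.distinct().collect()

println("SPARK_HOME on driver: " + sys.env.getOrElse("SPARK_HOME", "<unset>"))
println("driver UID:    " + driverUid)
println("executor UIDs: " + executorUids.mkString(", "))

If the little job itself dies with the same InvalidClassException, that is the
same answer: the executors were started from a different Spark build than the
one the shell is linked against, and rebuilding or re-deploying one side so the
binaries match is the fix.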
>>>>> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
>>>>> andrew.musselman@gmail.com> wrote:
>>>>>
>>>>>> Except after some time it blew up:
>>>>>>
>>>>>> $ bin/mahout spark-shell
>>>>>> SLF4J: Class path contains multiple SLF4J bindings.
>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>> SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>> SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>>>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
>>>>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>>>>>
>>>>>>                           _                 _
>>>>>>           _ __ ___   __ _| |__   ___  _   _| |_
>>>>>>          | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>>>>>          | | | | | | (_| | | | | (_) | |_| | |_
>>>>>>          |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>>>>>
>>>>>>
>>>>>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_03)
>>>>>> Type in expressions to have them evaluated.
>>>>>> Type :help for more information.
>>>>>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>>>>>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
>>>>>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
>>>>>> Created spark context..
>>>>>> Mahout distributed context is available as "implicit val sdc".
>>>>>> mahout> 15/02/24 12:24:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>>>>> [same stack trace as above; the identical error and trace repeated at 12:24:50, 12:26:00, 12:26:33, 12:27:43, 12:28:16, 12:29:26, 12:29:59, 12:31:09 and 12:31:42]
>>>>>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
>>>>>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
>>>>>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
>>>>>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
>>>>>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
>>>>>> 15/02/24 12:32:51 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:33:24 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5
>> (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 12:34:34 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:35:07 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6
>> (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 12:36:17 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:36:50 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7
>> (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 12:38:00 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:38:33 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8
>> (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 12:39:43 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:40:16 ERROR Remoting:
>>>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>> java.io.InvalidClassException:
>>> org.apache.spark.storage.BlockManagerId;
>>>>>> local class incompatible: stream classdesc serialVersionUID =
>>>>>> 1677335532749418220, local class serialVersionUID =
>>> -7366074099953117729
>>>>>>      at
>>>>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>>>>>      at
>>>>>>
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>>>>>      at
>>>>>>
>>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>>>>>      at
>>>>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>>>>>      at
>>> java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>>>>>      at
>>>>>>
>>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>>>>>      at
>> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>>>>>      at
>>>>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>>>>>      at scala.util.Try$.apply(Try.scala:161)
>>>>>>      at
>>>>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>>>>>      at
>>>>>>
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>>>>>      at
>>> akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>>>>>      at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>>>>>      at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>>>>>      at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>>>>>      at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>>>>>      at
>>>>>>
>>>>>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>>>>>      at
>>>>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>>>>      at
>>>>>>
>>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>>>>      at
>>>>>>
>>>>>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9
>> (already
>>>>>> removed): remote Akka client disassociated
>>>>>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has
>>>>> been
>>>>>> killed. Reason: Master removed our application: FAILED
>>>>>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>>>>>> cluster scheduler: Master removed our application: FAILED
>>>>>>
>>>>>>
>>>>>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
>>>>>>
>>>>>>> Bingo, works off master now; thanks Dmitriy.
>>>>>>>
>>>>>>> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>>>>>>>
>>>>>>>> ok spark 1.2 is mirrored now.
>>>>>>>> and master should be also ok (back to 1.1)
>>>>>>>>
>>>>>>>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
>>>>>>>>
>>>>>>>>> I reset hard to the previous commit, created a branch, and pushed it.
>>>>>>>>> I got email confirmation like so:
>>>>>>>>> Repository: mahout
>>>>>>>>> Updated Branches:
>>>>>>>>>    refs/heads/spark-1.2 [created] 901ef03b4
>>>>>>>>>
>>>>>>>>> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>>>>>>>>>
>>>>>>>>>> yeah ok so you pushed the 1.2 branch to asf but it is not yet in github.
>>>>>>>>>> it should be there eventually, give it a bit of time.
>>>>>>>>>>
>>>>>>>>>> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>>>>>>>>>>
>>>>>>>>>>> what exactly did you try to do?
>>>>>>>>>>>
>>>>>>>>>>> just resetting HEAD will not work on a remote branch -- you need to
>>>>>>>>>>> force-sync that (with +) since it is a history-rewriting push, but asf
>>>>>>>>>>> git does not allow that.
>>>>>>>>>>>
>>>>>>>>>>> ASF will mirror ALL branches afaik. I think i've done it before, so if
>>>>>>>>>>> you create a new one it should (eventually) get there.
>>>>>>>>>>>
>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
>>>>>>>>>>>
>>>>>>>>>>>> Does ASF git get mirrored to GitHub? I tried pushing a branch and
>>>>>>>>>>>> don't see it there yet.
>>>>>>>>>>>>
>>>>>>>>>>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>>>>>>>>>>>>
>>>>>>>>>>>>> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com> wrote:
>>>>>>>>>>>>>
>>>>>>>>>>>>>> to be safe I'd "git reset --hard xyz" to the commit previous to the 1.2.1
>>>>>>>>>>>>> As I just explained, resets are not possible with ASF git. Reverting
>>>>>>>>>>>>> is the only option.
>>>>>>>>>>>>>
>>>>>>>>>>>>> -d
>>>>>>>>>>>>>
>>>>>>>>>>>
>>>>>>>
>>>>
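
A minimal sketch of the two approaches discussed above; "abc123" and
"def456" are placeholder hashes, not real commits from this repo:

    # Not an option on the ASF remote: moving a published branch backwards
    # is a history-rewriting (forced) push, which asf git rejects.
    $ git reset --hard def456          # def456 = last good commit
    $ git push --force origin master   # rejected as a non-fast-forward push

    # What does work: revert the offending commit on top of the existing
    # history and push normally,
    $ git revert abc123                # abc123 = the commit being undone
    $ git push origin master

    # or push the state you want to keep as a brand-new branch, which ASF
    # mirrors to GitHub after a short delay.
    $ git checkout -b spark-1.2 def456
    $ git push origin spark-1.2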


Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Makes sense; I'm still getting those errors after restarting my rebuilt
Spark.

On Tue, Feb 24, 2015 at 2:12 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:

> IIRC MAHOUT_LOCAL doesn't mean a thing in spark mode. It is purely an MR
> thing.
>
> With the shell, the important things are MASTER (a spark setting) and also
> other spark-specific settings that should be added in MAHOUT_OPT (but the
> latter is broken in the public branch with the spark shell -- one of the
> bugs that need fixing).
>
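A minimal sketch of that setup, assuming a standalone master at
spark://localhost:7077 (the URL is a placeholder for whatever the real
cluster advertises):

    $ unset MAHOUT_LOCAL                     # ignored in spark mode anyway
    $ export MASTER=spark://localhost:7077   # the master the shell attaches to
    $ bin/mahout spark-shell
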
> On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
> >
> > Ah, I had an old build of spark server running.
> >
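
The serialVersionUIDs in the stream and in the local class only disagree
when the shell and the cluster daemons are running different Spark builds,
so the stale standalone master and workers have to be restarted from the
same rebuilt tree the shell uses. A rough sketch, assuming the stock
standalone scripts under $SPARK_HOME:

    $ cd $SPARK_HOME
    $ sbin/stop-all.sh    # stop the old master and workers
    $ sbin/start-all.sh   # relaunch them from the rebuilt assembly
    $ jps                 # Master and Worker should now come from this build
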
> > On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
> > andrew.musselman@gmail.com> wrote:
> >
> > > I only have one spark build that I know of, and it's still reporting
> > > errors in Mahout local mode.
> > >
> > > $ echo $SPARK_HOME
> > > /home/akm/spark
> > >
> > > $ export MAHOUT_LOCAL="TRUE"
> > > $ bin/mahout spark-shell
> > > MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
> > > SLF4J: Class path contains multiple SLF4J bindings.
> > > SLF4J: Found binding in
> > >
> >
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > > SLF4J: Found binding in
> > >
> >
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > > SLF4J: Found binding in
> > >
> >
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> > > explanation.
> > > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> > >
> > >                          _                 _
> > >          _ __ ___   __ _| |__   ___  _   _| |_
> > >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
> > >         | | | | | | (_| | | | | (_) | |_| | |_
> > >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
> > >
> > >
> > > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> > > 1.7.0_03)
> > > Type in expressions to have them evaluated.
> > > Type :help for more information.
> > > 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a
> > loopback
> > > address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> > > 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> > > another address
> > > 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
> > > library for your platform... using builtin-java classes where
> applicable
> > > Created spark context..
> > > Mahout distributed context is available as "implicit val sdc".
> > > mahout> 15/02/24 13:43:32 ERROR Remoting:
> > > org.apache.spark.storage.BlockManagerId; local class incompatible:
> stream
> > > classdesc serialVersionUID = 1677335532749418220, local class
> > > serialVersionUID = -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > >     at
> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >     at
> > > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >     at
> > java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at
> > >
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >     at
> > > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >     at
> > >
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >     at
> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >     at
> > >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >     at scala.util.Try$.apply(Try.scala:161)
> > >     at
> > akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >     at
> > > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >     at
> > >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >     at
> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >     at
> > >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >     at
> > >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >     at
> > scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >     at
> > >
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > > 15/02/24 13:44:05 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > >     at
> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >     at
> > > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >     at
> > java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at
> > >
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >     at
> > > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >     at
> > >
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >     at
> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >     at
> > >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >     at scala.util.Try$.apply(Try.scala:161)
> > >     at
> > akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >     at
> > > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >     at
> > >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >     at
> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >     at
> > >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >     at
> > >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >     at
> > scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >     at
> > >
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > > 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> > > removed): remote Akka client disassociated
> > > 15/02/24 13:45:15 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > >     at
> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >     at
> > > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >     at
> > java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at
> > >
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >     at
> > > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >     at
> > >
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >     at
> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >     at
> > >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >     at scala.util.Try$.apply(Try.scala:161)
> > >     at
> > akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >     at
> > > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >     at
> > >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >     at
> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >     at
> > >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >     at
> > >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >     at
> > scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >     at
> > >
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > > 15/02/24 13:45:48 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > >     at
> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >     at
> > > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >     at
> > java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at
> > >
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >     at
> > > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >     at
> > >
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >     at
> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >     at
> > >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >     at scala.util.Try$.apply(Try.scala:161)
> > >     at
> > akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >     at
> > > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >     at
> > >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >     at
> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >     at
> > >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >     at
> > >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >     at
> > scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >     at
> > >
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > > 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> > > removed): remote Akka client disassociated
> > > 15/02/24 13:46:57 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > > local class incompatible: stream classdesc serialVersionUID =
> > > 1677335532749418220, local class serialVersionUID =
> -7366074099953117729
> > >     at
> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >     at
> > > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >     at
> > java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at
> > >
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >     at
> > > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >     at
> > >
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >     at
> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >     at
> > >
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >     at
> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >     at
> > >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >     at scala.util.Try$.apply(Try.scala:161)
> > >     at
> > akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >     at
> > > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >     at
> > >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >     at
> akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >     at
> > >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >     at
> > >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >     at
> > scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >     at
> > >
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >     at
> > >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > > 15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
> > > 15/02/24 13:48:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:49:14 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
> > > 15/02/24 13:50:23 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:50:56 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
> > > 15/02/24 13:52:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:52:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same java.io.InvalidClassException stack trace as above]
> > > 15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
> > >
> > > mahout>
> > > mahout>
> > > mahout> 15/02/24 13:53:49 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >     [same stack trace as above]
> > >
> > > On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com>
> > > wrote:
> > >
> > >> Seems like different builds on the client and the backend.
> > >>
> > >> The shell is using your local Spark setup (pointed to with SPARK_HOME);
> > >> make sure it points to binaries identical to the ones used on the
> > >> backend, not just to the same Spark version.
> > >>
> > >> The reason is that Spark is not binary-canonical w.r.t. a release
> > >> version: the same version can correspond to custom builds, perhaps with
> > >> different JVM bytecode levels, Hadoop dependencies, etc.
> > >>
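
A quick way to act on that advice, for anyone hitting the same serialVersionUID mismatch, is to compare the actual Spark assembly binaries on the driver and worker hosts rather than just their version strings. This is only a minimal sketch, assuming SPARK_HOME points at a local build laid out like the spark-assembly paths in the SLF4J output in this thread, and that the workers were started from their own SPARK_HOME:

    # on the driver host that runs bin/mahout spark-shell
    echo $SPARK_HOME
    md5sum $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-*.jar

    # repeat on each worker host; if the checksums differ, the
    # BlockManagerId serialVersionUID mismatch above is expected,
    # and one identical build should be deployed everywhere.
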
> > >> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
> > >>
> > >> > Except after some time it blew up:
> > >> >
> > >> > $ bin/mahout spark-shell
> > >> > SLF4J: Class path contains multiple SLF4J bindings.
> > >> > SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > >> > SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > >> > SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > >> > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
> > >> > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> > >> >
> > >> >                          _                 _
> > >> >          _ __ ___   __ _| |__   ___  _   _| |_
> > >> >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
> > >> >         | | | | | | (_| | | | | (_) | |_| | |_
> > >> >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
> > >> >
> > >> >
> > >> > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_03)
> > >> > Type in expressions to have them evaluated.
> > >> > Type :help for more information.
> > >> > 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> > >> > 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
> > >> > 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> > >> > Created spark context..
> > >> > Mahout distributed context is available as "implicit val sdc".
> > >> > mahout> 15/02/24 12:24:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same stack trace as above]
> > >> > 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
> > >> > 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
> > >> > 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
> > >> > 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
> > >> > 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > >> >     [same java.io.InvalidClassException stack trace as above]
> > >> > 15/02/24 12:31:42 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:32:51 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:33:24 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:34:34 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:35:07 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:36:17 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:36:50 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:38:00 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:38:33 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:39:43 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:40:16 ERROR Remoting:
> > >> org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> > java.io.InvalidClassException:
> > org.apache.spark.storage.BlockManagerId;
> > >> > local class incompatible: stream classdesc serialVersionUID =
> > >> > 1677335532749418220, local class serialVersionUID =
> > -7366074099953117729
> > >> >     at
> > >> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> > >> >     at
> > >> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > >> >
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> > >> >     at
> > >> >
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> > >> >     at
> > >> >
> > >>
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> > >> >     at
> > >> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> > >> >     at
> > java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> > >> >     at
> > >> >
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> > >> >     at
> scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> > >> >     at
> > >> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> > >> >     at scala.util.Try$.apply(Try.scala:161)
> > >> >     at
> > >> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> > >> >     at
> > >> >
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> > >> >     at
> > akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> > >> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> > >> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> > >> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> > >> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> > >> >     at
> > >> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> > >> >     at
> > >> >
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> > >> >     at
> > >> >
> > >> >
> > >>
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > >> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9
> (already
> > >> > removed): remote Akka client disassociated
> > >> > 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has
> > >> been
> > >> > killed. Reason: Master removed our application: FAILED
> > >> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
> > >> > cluster scheduler: Master removed our application: FAILED
> > >> >
> > >> >
> > >> > On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
> > >> > andrew.musselman@gmail.com> wrote:
> > >> >
> > >> > > Bingo, works off master now; thanks Dmitriy.
> > >> > >
> > >> > > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <
> > dlieu.7@gmail.com
> > >> >
> > >> > > wrote:
> > >> > >
> > >> > >> ok spark 1.2 is mirrored now.
> > >> > >> and master should be also ok (back to 1.1)
> > >> > >>
> > >> > >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
> > >> > >> andrew.musselman@gmail.com> wrote:
> > >> > >>
> > >> > >> > I reset hard to the previous commit, created a branch, and pushed it.
> > >> > >> >
> > >> > >> > I got email confirmation like so:
> > >> > >> > Repository: mahout
> > >> > >> > Updated Branches:
> > >> > >> >   refs/heads/spark-1.2 [created] 901ef03b4
> > >> > >> >
> > >> > >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <
> > >> dlieu.7@gmail.com
> > >> > >
> > >> > >> > wrote:
> > >> > >> >
> > >> > >> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
> > >> > >> > > should be there eventually, give it a bit of time.
> > >> > >> > >
> > >> > >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <
> > >> > dlieu.7@gmail.com
> > >> > >> >
> > >> > >> > > wrote:
> > >> > >> > >
> > >> > >> > > > what exactly did you try to do?
> > >> > >> > > >
> > >> > >> > > > just resetting HEAD will not work on a remote branch -- you need to
> > >> > >> > > > force-sync that (with +) since it is a history-rewriting push, but asf
> > >> > >> > > > git does not allow that.
> > >> > >> > > >
> > >> > >> > > > ASF will mirror ALL branches afaik. I think i've done it before, so if
> > >> > >> > > > you create a new one it should (eventually) get there.
> > >> > >> > > >
> > >> > >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> > >> > >> > > > andrew.musselman@gmail.com> wrote:
> > >> > >> > > >
> > >> > >> > > >> Does ASF git get mirrored to GitHub? I tried pushing a branch and
> > >> > >> > > >> don't see it there yet.
> > >> > >> > > >>
> > >> > >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
> > >> > >> dlieu.7@gmail.com
> > >> > >> > >
> > >> > >> > > >> wrote:
> > >> > >> > > >>
> > >> > >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
> > >> > >> pat@occamsmachete.com
> > >> > >> > >
> > >> > >> > > >> > wrote:
> > >> > >> > > >> >
> > >> > >> > > >> > > to be safe I’d “git reset --hard xyz” to the commit previous to the 1.2.1
> > >> > >> > > >> > >
> > >> > >> > > >> >
> > >> > >> > > >> > As I just explained, such resets are not possible with ASF git.
> > >> > >> > > >> > Reverting is the only option.
> > >> > >> > > >> >
> > >> > >> > > >> > -d
> > >> > >> > > >> >
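
For reference, a minimal sketch of the approaches discussed above; the commit id
and the branch name are placeholders, not values taken from this thread:

$ # history-rewriting rollback: needs a forced push (the '+'), which ASF git rejects
$ git reset --hard <previous-commit>
$ git push origin +master

$ # history-preserving alternatives that a plain push can carry:
$ git revert <bad-commit>                        # adds a new commit that undoes the change
$ git checkout -b spark-1.2 <previous-commit>    # or park the old state on a new branch
$ git push origin spark-1.2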

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
IIRC MAHOUT_LOCAL doesn't mean a thing in Spark mode. It is purely an MR
thing.

With the shell, the important things are MASTER (a Spark setting) and also the
other Spark-specific settings that should be added in MAHOUT_OPT (but the
latter is broken in the public branch with the spark shell -- one of the bugs
that need fixing).
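
For illustration, a minimal sketch of how that usually looks when launching the
shell against a standalone master; the SPARK_HOME path and master URL below are
placeholders, not values taken from this thread:

$ export SPARK_HOME=/path/to/the/spark/build/your/cluster/runs
$ export MASTER=spark://your-master-host:7077   # or MASTER=local[2] to stay in one JVM
$ bin/mahout spark-shell

The underlying point is that the Spark build on the driver classpath and the one
the cluster runs need to match; otherwise you get the BlockManagerId
serialVersionUID mismatches seen in this thread.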

On Tue, Feb 24, 2015 at 2:03 PM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> Ah, I had an old build of spark server running.
>
> On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
> > I only have one spark build that I know of, and it's still reporting
> > errors in Mahout local mode.
> >
> > $ echo $SPARK_HOME
> > /home/akm/spark
> >
> > $ export MAHOUT_LOCAL="TRUE"
> > $ bin/mahout spark-shell
> > MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
> > SLF4J: Class path contains multiple SLF4J bindings.
> > SLF4J: Found binding in [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: Found binding in [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: Found binding in [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
> > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> >
> >                          _                 _
> >          _ __ ___   __ _| |__   ___  _   _| |_
> >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
> >         | | | | | | (_| | | | | (_) | |_| | |_
> >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
> >
> >
> > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> > 1.7.0_03)
> > Type in expressions to have them evaluated.
> > Type :help for more information.
> > 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> > 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
> > 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> > Created spark context..
> > Mahout distributed context is available as "implicit val sdc".
> > mahout> 15/02/24 13:43:32 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> > classdesc serialVersionUID = 1677335532749418220, local class
> > serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 13:44:05 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> > removed): remote Akka client disassociated
> > 15/02/24 13:45:15 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> > removed): remote Akka client disassociated
> > [... same InvalidClassException stack trace repeated at 13:46:57 and 13:47:30 ...]
> > 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> > removed): remote Akka client disassociated
> > [... same stack trace repeated at 13:48:40 and 13:49:14 ...]
> > 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> > removed): remote Akka client disassociated
> > [... same stack trace repeated at 13:50:23 and 13:50:56 ...]
> > 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already
> > removed): remote Akka client disassociated
> > [... same stack trace repeated at 13:52:07 and 13:52:40 ...]
> > 15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 (already
> > removed): remote Akka client disassociated
> >
> > mahout>
> > mahout>
> > mahout> 15/02/24 13:53:49 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> > classdesc serialVersionUID = 1677335532749418220, local class
> > serialVersionUID = -7366074099953117729
> > [... same InvalidClassException stack trace as above ...]
> >
> > On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com>
> > wrote:
> >
> >> Seems like different builds on the client and the backend.
> >>
> >> The shell is using your local Spark setup (pointed to with SPARK_HOME).
> >> Make sure it points to binaries identical to what is used on the backend,
> >> not just the same Spark version.
> >>
> >> The reason is that Spark is not binary-canonical w.r.t. release version:
> >> the same release can still differ across custom builds, JVM bytecode
> >> levels, Hadoop dependencies, etc.
> >>
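[Editor's note: one way to confirm the kind of mismatch described above is to
compare the serialVersionUID each side computes for BlockManagerId against the
two numbers printed in the exception. The snippet below is only a sketch of a
possible check, not something from this thread: run it once in the Mahout
spark-shell on the driver, and once in a Scala REPL started against the jars
the workers actually load, then compare the printed values.]

    // Hypothetical diagnostic sketch: BlockManagerId does not declare an
    // explicit serialVersionUID, so the JVM computes one from the bytecode;
    // different Spark builds can therefore yield different values.
    import java.io.ObjectStreamClass
    import org.apache.spark.storage.BlockManagerId

    val uid = ObjectStreamClass.lookup(classOf[BlockManagerId]).getSerialVersionUID
    println(s"local BlockManagerId serialVersionUID = $uid")

[If the two sides print different numbers, the shell and the executors are
loading different Spark builds; pointing SPARK_HOME at the exact assembly the
cluster runs, or rebuilding both from the same source, should make the
InvalidClassException go away.]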
> >> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
> >> andrew.musselman@gmail.com> wrote:
> >>
> >> > Except after some time it blew up:
> >> >
> >> > $ bin/mahout spark-shell
> >> > SLF4J: Class path contains multiple SLF4J bindings.
> >> > SLF4J: Found binding in
> >> > [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >> > SLF4J: Found binding in
> >> > [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >> > SLF4J: Found binding in
> >> > [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> >> > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> >> > explanation.
> >> > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> >> >
> >> >                          _                 _
> >> >          _ __ ___   __ _| |__   ___  _   _| |_
> >> >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
> >> >         | | | | | | (_| | | | | (_) | |_| | |_
> >> >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
> >> >
> >> >
> >> > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> >> > 1.7.0_03)
> >> > Type in expressions to have them evaluated.
> >> > Type :help for more information.
> >> > 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
> >> > address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> >> > 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> >> > another address
> >> > 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
> >> > library for your platform... using builtin-java classes where applicable
> >> > Created spark context..
> >> > Mahout distributed context is available as "implicit val sdc".
> >> > mahout> 15/02/24 12:24:17 ERROR Remoting:
> >> > org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> >> > classdesc serialVersionUID = 1677335532749418220, local class
> >> > serialVersionUID = -7366074099953117729
> >> > [... same InvalidClassException stack trace as above, repeated again at
> >> > 12:24:50 ...]
> >> > 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> >> > removed): remote Akka client disassociated
> >> > [... same stack trace repeated at 12:26:00 and 12:26:33 ...]
> >> > 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> >> > removed): remote Akka client disassociated
> >> > [... same stack trace repeated at 12:27:43 and 12:28:16 ...]
> >> > 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> >> > removed): remote Akka client disassociated
> >> > [... same stack trace repeated at 12:29:26 and 12:29:59 ...]
> >> > 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> >> > removed): remote Akka client disassociated
> >> > [... same stack trace repeated at 12:31:09 ...]
> >> > 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > [the same java.io.InvalidClassException stack trace as above follows each of these Remoting errors]
> >> > 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already removed): remote Akka client disassociated
> >> > 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been killed. Reason: Master removed our application: FAILED
> >> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from cluster scheduler: Master removed our application: FAILED
> >> >
> >> >
> >> > On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
> >> > andrew.musselman@gmail.com> wrote:
> >> >
> >> > > Bingo, works off master now; thanks Dmitriy.
> >> > >
> >> > > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
> >> > > wrote:
> >> > >
> >> > >> ok spark 1.2 is mirrored now.
> >> > >> and master should also be ok (back to 1.1)
> >> > >>
> >> > >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
> >> > >> andrew.musselman@gmail.com> wrote:
> >> > >>
> >> > >> > I reset hard to the previous commit, created a branch, and pushed it.
> >> > >> >
> >> > >> > I got email confirmation like so:
> >> > >> > Repository: mahout
> >> > >> > Updated Branches:
> >> > >> >   refs/heads/spark-1.2 [created] 901ef03b4
> >> > >> >
> >> > >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
> >> > >> > wrote:
> >> > >> >
> >> > >> > > yeah, ok, so you pushed the 1.2 branch to ASF but it is not yet in
> >> > >> > > github. It should be there eventually; give it a bit of time.
> >> > >> > >
> >> > >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
> >> > >> > > wrote:
> >> > >> > >
> >> > >> > > > what exactly did you try to do?
> >> > >> > > >
> >> > >> > > > just resetting HEAD will not work on a remote branch -- you need to
> >> > >> > > > force-push that (with +) since it is a history-rewriting push, but
> >> > >> > > > ASF git does not allow that.
> >> > >> > > >
> >> > >> > > > ASF will mirror ALL branches afaik. I think I've done it before, so
> >> > >> > > > if you create a new one it should (eventually) get there.
> >> > >> > > >
> >> > >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> >> > >> > > > andrew.musselman@gmail.com> wrote:
> >> > >> > > >
> >> > >> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
> >> > >> > > >> don't see it there yet.
> >> > >> > > >>
> >> > >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com>
> >> > >> > > >> wrote:
> >> > >> > > >>
> >> > >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com>
> >> > >> > > >> > wrote:
> >> > >> > > >> >
> >> > >> > > >> > > to be safe I'd "git reset --hard xyz" to the commit previous
> >> > >> > > >> > > to the 1.2.1
> >> > >> > > >> > >
> >> > >> > > >> >
> >> > >> > > >> > As I just explained, such resets are not possible with ASF git.
> >> > >> > > >> > Reverting is the only option.
> >> > >> > > >> >
> >> > >> > > >> > -d
> >> > >> > > >> >
> >> > >> > > >>
> >> > >> > > >
> >> > >> > > >
> >> > >> > >
> >> > >> >
> >> > >>
> >> > >
> >> > >
> >> >
> >>
> >
> >
>
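
For reference, a rough sketch of the two approaches discussed in the quoted thread above; <good-commit> and <bad-commit> are placeholders, not values from this thread:

  # History-rewriting approach: works on a local branch, but the forced
  # (non-fast-forward) push is rejected by ASF git.
  git reset --hard <good-commit>
  git push origin +master

  # Accepted approach: add a new commit that undoes the unwanted one,
  # then push normally.
  git revert <bad-commit>
  git push origin master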

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Ah, I had an old build of spark server running.
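
(For reference: one quick way to confirm this kind of serialVersionUID mismatch is to run the JDK's serialver tool against each Spark assembly on the classpath and compare the results; the first jar path below is the one from the session quoted underneath, the second is a placeholder for whichever stale build is still running.)

  serialver -classpath /home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar org.apache.spark.storage.BlockManagerId
  serialver -classpath /path/to/old/spark-assembly.jar org.apache.spark.storage.BlockManagerId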

On Tue, Feb 24, 2015 at 1:56 PM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> I only have one spark build that I know of, and it's still reporting
> errors in Mahout local mode.
>
> $ echo $SPARK_HOME
> /home/akm/spark
>
> $ export MAHOUT_LOCAL="TRUE"
> $ bin/mahout spark-shell
> MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.
> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>
>                          _                 _
>          _ __ ___   __ _| |__   ___  _   _| |_
>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>         | | | | | | (_| | | | | (_) | |_| | |_
>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>
>
> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> 1.7.0_03)
> Type in expressions to have them evaluated.
> Type :help for more information.
> 15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> 15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> another address
> 15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
> library for your platform... using builtin-java classes where applicable
> Created spark context..
> Mahout distributed context is available as "implicit val sdc".
> mahout> 15/02/24 13:43:32 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> removed): remote Akka client disassociated
> 15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> removed): remote Akka client disassociated
> 15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>     [... same java.io.InvalidClassException stack trace, logged again at 13:47:30 ...]
> 15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> removed): remote Akka client disassociated
>     [... same stack trace, logged again at 13:48:40 and 13:49:14 ...]
> 15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> removed): remote Akka client disassociated
>     [... same stack trace, logged again at 13:50:23 and 13:50:56 ...]
> 15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already
> removed): remote Akka client disassociated
>     [... same stack trace, logged again at 13:52:07 and 13:52:40 ...]
> 15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 (already
> removed): remote Akka client disassociated
>
> mahout>
> mahout>
> mahout> 15/02/24 13:53:49 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> classdesc serialVersionUID = 1677335532749418220, local class
> serialVersionUID = -7366074099953117729
>     [... same java.io.InvalidClassException stack trace as above ...]
>
> On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
>> Seems like different builds on the client and the backend.
>>
>> The shell uses your local Spark setup (pointed to by SPARK_HOME); make sure
>> it points to binaries identical to what the backend is running, not just
>> the same Spark version.
>>
>> The reason is that Spark binaries are not canonical for a given release
>> version: custom builds, different JVM bytecode levels, Hadoop dependencies,
>> etc. can all produce incompatible classes.
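
One quick way to confirm that kind of mismatch is to print the
serialVersionUID each jar yields for the failing class; the two values in
the logs above (1677335532749418220 vs -7366074099953117729) point to two
different builds of BlockManagerId. The script below is only a minimal
sketch, not part of Mahout or Spark (the file name and classpath are
placeholders): run it once against the assembly jar SPARK_HOME points at and
once against the jar the workers were started from, then compare the output.

    // serialuid-check.scala -- hypothetical helper script, not from this thread.
    // Run with the Spark jar under test on the classpath, e.g.:
    //   scala -cp /path/to/spark-assembly-<version>.jar serialuid-check.scala
    import java.io.ObjectStreamClass

    val cls  = Class.forName("org.apache.spark.storage.BlockManagerId")
    val desc = ObjectStreamClass.lookup(cls)  // non-null: BlockManagerId is Serializable
    println(s"${cls.getName} serialVersionUID = ${desc.getSerialVersionUID}")

If the two runs print different numbers, the driver and executors are loading
different BlockManagerId bytecode, and Java serialization will fail exactly as
shown even though both sides report the same Spark release.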
>>
>> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>> > Except after some time it blew up:
>> >
>> > $ bin/mahout spark-shell
>> > SLF4J: Class path contains multiple SLF4J bindings.
>> > SLF4J: Found binding in
>> >
>> >
>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> > SLF4J: Found binding in
>> >
>> >
>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> > SLF4J: Found binding in
>> >
>> >
>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>> > explanation.
>> > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>> >
>> >                          _                 _
>> >          _ __ ___   __ _| |__   ___  _   _| |_
>> >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>> >         | | | | | | (_| | | | | (_) | |_| | |_
>> >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>> >
>> >
>> > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>> > 1.7.0_03)
>> > Type in expressions to have them evaluated.
>> > Type :help for more information.
>> > 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a
>> loopback
>> > address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>> > 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>> > another address
>> > 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>> > library for your platform... using builtin-java classes where applicable
>> > Created spark context..
>> > Mahout distributed context is available as "implicit val sdc".
>> > mahout> 15/02/24 12:24:17 ERROR Remoting:
>> > org.apache.spark.storage.BlockManagerId; local class incompatible:
>> stream
>> > classdesc serialVersionUID = 1677335532749418220, local class
>> > serialVersionUID = -7366074099953117729
>> >     [... java.io.InvalidClassException stack trace, identical to the one shown earlier in the thread ...]
>> >     [... same stack trace, logged again at 12:24:50 ...]
>> > 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>> > removed): remote Akka client disassociated
>> >     [... same stack trace, logged again at 12:26:00 and 12:26:33 ...]
>> > 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>> > removed): remote Akka client disassociated
>> >     [... same stack trace, logged again at 12:27:43 and 12:28:16 ...]
>> > 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>> > removed): remote Akka client disassociated
>> >     [... same stack trace, logged again at 12:29:26 and 12:29:59 ...]
>> > 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>> > removed): remote Akka client disassociated
>> >     [... same stack trace, logged again at 12:31:09 and 12:31:42 ...]
>> > 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>> > removed): remote Akka client disassociated
>> >     [... same stack trace, logged again at 12:32:51 ...]
>> > 15/02/24 12:33:24 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>> > removed): remote Akka client disassociated
>> > 15/02/24 12:34:34 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:35:07 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>> > removed): remote Akka client disassociated
>> > 15/02/24 12:36:17 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:36:50 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>> > removed): remote Akka client disassociated
>> > 15/02/24 12:38:00 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:38:33 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>> > removed): remote Akka client disassociated
>> > 15/02/24 12:39:43 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:40:16 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> > local class incompatible: stream classdesc serialVersionUID =
>> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> >     at
>> java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>> >     at
>> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>> >     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at
>> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>> >     at
>> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>> >     at
>> >
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>> >     at
>> java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>> >     at
>> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>> >     at
>> akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>> >     at
>> >
>> >
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>> >     at scala.util.Try$.apply(Try.scala:161)
>> >     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>> >     at
>> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>> >     at
>> >
>> >
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>> >     at
>> >
>> >
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>> >     at
>> >
>> >
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>> >     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>> >     at
>> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>> >     at
>> >
>> >
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>> > removed): remote Akka client disassociated
>> > 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has
>> been
>> > killed. Reason: Master removed our application: FAILED
>> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>> > cluster scheduler: Master removed our application: FAILED
>> >
>> >
>> > On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>> > andrew.musselman@gmail.com> wrote:
>> >
>> > > Bingo, works off master now; thanks Dmitriy.
>> > >
>> > > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>> > >
>> > >> ok spark 1.2 is mirrored now.
>> > >> and master should be also ok (back to 1.1)
>> > >>
>> > >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>> > >> andrew.musselman@gmail.com> wrote:
>> > >>
>> > >> > I reset hard to the previous commit, created a branch, and pushed it.
>> > >> >
>> > >> > I got email confirmation like so:
>> > >> > Repository: mahout
>> > >> > Updated Branches:
>> > >> >   refs/heads/spark-1.2 [created] 901ef03b4
>> > >> >
>> > >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>> > >> >
>> > >> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>> > >> > > It should be there eventually, give it a bit of time.
>> > >> > >
>> > >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>> > >> > >
>> > >> > > > what exactly did you try to do?
>> > >> > > >
>> > >> > > > just resetting HEAD will not work on a remote branch -- you need to
>> > >> > > > force-sync it (with +) since that is a history-rewriting push, but asf
>> > >> > > > git does not allow that.
>> > >> > > >
>> > >> > > > ASF will mirror ALL branches afaik. I think I've done it before, so if
>> > >> > > > you create a new one it should (eventually) get there.
>> > >> > > >
>> > >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>> > >> > > > andrew.musselman@gmail.com> wrote:
>> > >> > > >
>> > >> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>> > >> > > >> don't see it there yet.
>> > >> > > >>
>> > >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com> wrote:
>> > >> > > >>
>> > >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com> wrote:
>> > >> > > >> >
>> > >> > > >> > > to be safe I'd "git reset --hard xyz" to the commit previous to
>> > >> > > >> > > the 1.2.1
>> > >> > > >> > >
>> > >> > > >> >
>> > >> > > >> > As I just explained, resets are not possible with ASF git.
>> > >> > > >> > Reverting is the only option.
>> > >> > > >> >
>> > >> > > >> > -d
>> > >> > > >> >
>> > >> > > >>
>> > >> > > >
>> > >> > > >
>> > >> > >
>> > >> >
>> > >>
>> > >
>> > >
>> >
>>
>
>
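
The repeated java.io.InvalidClassException above means the BlockManagerId instances
arriving over Akka were serialized by a different Spark build than the one on the
receiving classpath (stream serialVersionUID 1677335532749418220 vs. local
-7366074099953117729). A minimal sketch for printing the UID a given build computes,
assuming a Spark assembly jar is on the classpath; the object name
CheckBlockManagerIdSuid is illustrative only, not part of Mahout or Spark:

    import java.io.ObjectStreamClass

    // Prints the serialVersionUID that the local Spark classes produce for
    // BlockManagerId; run it against the driver's jar and the workers' jar to
    // confirm both sides are on the same build.
    object CheckBlockManagerIdSuid {
      def main(args: Array[String]): Unit = {
        val cls  = Class.forName("org.apache.spark.storage.BlockManagerId")
        val desc = ObjectStreamClass.lookup(cls)
        println(s"local serialVersionUID = ${desc.getSerialVersionUID}")
      }
    }

When both runs print the same number, the driver and the executors are loading the
same Spark build and this particular error should not appear.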

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
I only have one spark build that I know of, and it's still reporting errors
in Mahout local mode.

$ echo $SPARK_HOME
/home/akm/spark

$ export MAHOUT_LOCAL="TRUE"
$ bin/mahout spark-shell
MAHOUT_LOCAL is set, so we don't add HADOOP_CONF_DIR to classpath.
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]

                         _                 _
         _ __ ___   __ _| |__   ___  _   _| |_
        | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
        | | | | | | (_| | | | | (_) | |_| | |_
        |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0


Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
1.7.0_03)
Type in expressions to have them evaluated.
Type :help for more information.
15/02/24 13:42:42 WARN Utils: Your hostname, ubuntu resolves to a loopback
address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
15/02/24 13:42:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
another address
15/02/24 13:42:54 WARN NativeCodeLoader: Unable to load native-hadoop
library for your platform... using builtin-java classes where applicable
Created spark context..
Mahout distributed context is available as "implicit val sdc".
mahout> 15/02/24 13:43:32 ERROR Remoting:
org.apache.spark.storage.BlockManagerId; local class incompatible: stream
classdesc serialVersionUID = 1677335532749418220, local class
serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:44:05 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:44:38 ERROR TaskSchedulerImpl: Lost an executor 0 (already
removed): remote Akka client disassociated
15/02/24 13:45:15 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:45:48 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:46:21 ERROR TaskSchedulerImpl: Lost an executor 1 (already
removed): remote Akka client disassociated
15/02/24 13:46:57 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:47:30 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:48:04 ERROR TaskSchedulerImpl: Lost an executor 2 (already
removed): remote Akka client disassociated
15/02/24 13:48:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:49:14 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:49:47 ERROR TaskSchedulerImpl: Lost an executor 3 (already
removed): remote Akka client disassociated
15/02/24 13:50:23 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:50:56 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:51:30 ERROR TaskSchedulerImpl: Lost an executor 4 (already
removed): remote Akka client disassociated
15/02/24 13:52:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:52:40 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 13:53:13 ERROR TaskSchedulerImpl: Lost an executor 5 (already
removed): remote Akka client disassociated

mahout>
mahout>
mahout> 15/02/24 13:53:49 ERROR Remoting:
org.apache.spark.storage.BlockManagerId; local class incompatible: stream
classdesc serialVersionUID = 1677335532749418220, local class
serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:

> Seems like different builds on the client and the backend.
>
> The shell uses your local Spark setup (pointed to by SPARK_HOME). Make
> sure it points to binaries identical to what is used on the backend, not
> just the same Spark version.
>
> The reason is that Spark is not binary-canonical w.r.t. release version:
> the same release can differ across custom builds, JVM bytecode levels,
> Hadoop dependencies, etc.
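
A quick way to check this, assuming a standalone cluster and that
SPARK_HOME on the shell machine points at the locally built Spark (the
worker install path below is illustrative, not literal):

# on the machine running the Mahout shell (the driver)
$ md5sum "$SPARK_HOME"/assembly/target/scala-2.10/spark-assembly-*.jar

# on each worker, against that worker's Spark install, then compare sums
$ md5sum /opt/spark/assembly/target/scala-2.10/spark-assembly-*.jar

If the checksums differ, or even the Hadoop suffix in the assembly jar
name differs between the two sides, that would explain the
BlockManagerId serialVersionUID mismatch in the logs above.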
>
> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
> > Except after some time it blew up:
> >
> > $ bin/mahout spark-shell
> > SLF4J: Class path contains multiple SLF4J bindings.
> > SLF4J: Found binding in
> >
> >
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: Found binding in
> >
> >
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: Found binding in
> >
> >
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> > SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> > explanation.
> > SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
> >
> >                          _                 _
> >          _ __ ___   __ _| |__   ___  _   _| |_
> >         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
> >         | | | | | | (_| | | | | (_) | |_| | |_
> >         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
> >
> >
> > Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> > 1.7.0_03)
> > Type in expressions to have them evaluated.
> > Type :help for more information.
> > 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a
> loopback
> > address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> > 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> > another address
> > 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
> > library for your platform... using builtin-java classes where applicable
> > Created spark context..
> > Mahout distributed context is available as "implicit val sdc".
> > mahout> 15/02/24 12:24:17 ERROR Remoting:
> > org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> > classdesc serialVersionUID = 1677335532749418220, local class
> > serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:24:50 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:26:00 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:26:33 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:27:43 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:28:16 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:29:26 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:29:59 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:31:09 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:31:42 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:32:51 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:33:24 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:34:34 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> >     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
> >     at
> > java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
> >     at
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at
> > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
> >     at
> > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
> >     at
> > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
> >     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
> >     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
> >     at
> > akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
> >     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
> >     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
> >     at
> >
> >
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
> >     at scala.util.Try$.apply(Try.scala:161)
> >     at
> akka.serialization.Serialization.deserialize(Serialization.scala:98)
> >     at
> > akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
> >     at
> >
> >
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
> >     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
> >     at
> >
> >
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
> >     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> >     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> >     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> >     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> >     at
> >
> >
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> >     at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> >     at
> > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> >     at
> >
> >
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> > 15/02/24 12:35:07 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:36:17 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:36:50 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:38:00 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:38:33 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:39:43 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:40:16 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> > local class incompatible: stream classdesc serialVersionUID =
> > 1677335532749418220, local class serialVersionUID = -7366074099953117729
> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
> > removed): remote Akka client disassociated
> > 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
> > killed. Reason: Master removed our application: FAILED
> > 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
> > cluster scheduler: Master removed our application: FAILED
> >
> >
> > On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
> > andrew.musselman@gmail.com> wrote:
> >
> > > Bingo, works off master now; thanks Dmitriy.
> > >
> > > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > > wrote:
> > >
> > >> ok spark 1.2 is mirrored now.
> > >> and master should also be ok (back to 1.1)
> > >>
> > >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
> > >> andrew.musselman@gmail.com> wrote:
> > >>
> > >> > I reset hard to the previous commit, created a branch, and pushed
> it.
> > >> >
> > >> > I got email confirmation like so:
> > >> > Repository: mahout
> > >> > Updated Branches:
> > >> >   refs/heads/spark-1.2 [created] 901ef03b4
> > >> >
> > >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <
> dlieu.7@gmail.com
> > >
> > >> > wrote:
> > >> >
> > >> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
> > >> > > it should be there eventually, give it a bit of time.
> > >> > >
> > >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <
> > dlieu.7@gmail.com
> > >> >
> > >> > > wrote:
> > >> > >
> > >> > > > what exactly did you try to do?
> > >> > > >
> > >> > > > just resetting HEAD will not work on a remote branch -- you need to
> > >> > > > force-sync that (with +) since it is a history-rewriting push, but
> > >> > > > asf git does not allow that.
> > >> > > >
> > >> > > > ASF will mirror ALL branches afaik. I think i've done it before.
> > so
> > >> if
> > >> > > you
> > >> > > > create a new one it should (eventually) get there.
> > >> > > >
> > >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> > >> > > > andrew.musselman@gmail.com> wrote:
> > >> > > >
> > >> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
> > >> > > >> don't see it there yet.
> > >> > > >>
> > >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
> > >> dlieu.7@gmail.com
> > >> > >
> > >> > > >> wrote:
> > >> > > >>
> > >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
> > >> pat@occamsmachete.com
> > >> > >
> > >> > > >> > wrote:
> > >> > > >> >
> > >> > > >> > > to be safe I’d “git reset —hard xyz” to the commit previous
> > to
> > >> the
> > >> > > >> 1.2.1
> > >> > > >> > >
> > >> > > >> >
> > >> > > >> > As I just explained, resets are not possible with ASF git.
> > >> > > >> > Reverting is the only option.
> > >> > > >> >
> > >> > > >> > -d
> > >> > > >> >
> > >> > > >>
> > >> > > >
> > >> > > >
> > >> > >
> > >> >
> > >>
> > >
> > >
> >
>
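For reference, a rough sketch of the two approaches discussed in the quoted
thread above; the SHAs below are placeholders, not the actual commits, and
only the spark-1.2 branch name comes from the confirmation mail:

# Roll back a pushed commit by adding an inverse commit (allowed on ASF git):
$ git revert <sha-of-spark-1.2-upgrade>
$ git push origin master

# History-rewriting alternative: reset plus forced update ("+"), which ASF git rejects:
$ git reset --hard <sha-before-upgrade>
$ git push origin +master

# Keep the 1.2 work around on its own branch, which does get mirrored:
$ git branch spark-1.2 <sha-of-spark-1.2-upgrade>
$ git push origin spark-1.2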

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
Is it local or standalone? Local mode should not produce these types of
errors; for anything else it is likely what I said earlier.
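A quick way to tell the two apart, assuming the shell honors the usual
SPARK_HOME and MASTER environment variables; the SPARK_HOME path is the one
from the SLF4J lines below, the master URL is a placeholder:

# Local mode: executors run in the same JVM, so nothing is deserialized over
# the wire and a client/worker binary mismatch cannot show up.
$ export SPARK_HOME=/home/akm/spark
$ MASTER=local[2] bin/mahout spark-shell

# Standalone mode: executors deserialize driver messages with their own Spark
# build, so any build difference surfaces as errors like the BlockManagerId
# serialVersionUID mismatch below.
$ MASTER=spark://<master-host>:7077 bin/mahout spark-shell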

On Tue, Feb 24, 2015 at 1:08 PM, Dmitriy Lyubimov <dl...@gmail.com> wrote:

> seems like different builds on client and backend.
>
> shell is using your local spark setup (pointed to with SPARK_HOME). make
> sure it points to identical binaries (not just spark version) to what is
> used in the backend.
>
> the reason is that spark is not binary-canonical w.r.t. release version: it
> implies custom builds, perhaps different jvm bytecode levels, hadoop
> dependencies, etc.
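One way to sanity-check that, assuming the standalone workers also run from
an unpacked Spark build; the driver-side path matches the SLF4J lines below,
while the worker-side path is a placeholder:

# On the machine running bin/mahout spark-shell: which assembly the shell loads.
$ echo $SPARK_HOME
$ md5sum $SPARK_HOME/assembly/target/scala-2.10/spark-assembly-*.jar

# On each standalone worker (placeholder location):
$ md5sum /path/to/spark/assembly/target/scala-2.10/spark-assembly-*.jar

# The sums should match exactly. The same release built against a different
# Hadoop profile or JDK yields different bytecode, so computed
# serialVersionUIDs diverge, e.g. the BlockManagerId mismatch in the log below.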
>
> On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
>> Except after some time it blew up:
>>
>> $ bin/mahout spark-shell
>> SLF4J: Class path contains multiple SLF4J bindings.
>> SLF4J: Found binding in
>>
>> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> SLF4J: Found binding in
>>
>> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> SLF4J: Found binding in
>>
>> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>> explanation.
>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>
>>                          _                 _
>>          _ __ ___   __ _| |__   ___  _   _| |_
>>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>>         | | | | | | (_| | | | | (_) | |_| | |_
>>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>>
>>
>> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
>> 1.7.0_03)
>> Type in expressions to have them evaluated.
>> Type :help for more information.
>> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
>> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
>> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
>> another address
>> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
>> library for your platform... using builtin-java classes where applicable
>> Created spark context..
>> Mahout distributed context is available as "implicit val sdc".
>> mahout> 15/02/24 12:24:17 ERROR Remoting:
>> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
>> classdesc serialVersionUID = 1677335532749418220, local class
>> serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>>     at
>> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>>     at
>> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at
>> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>>     at
>> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>>     at
>> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>>     at
>> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>     at
>>
>> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>     at scala.util.Try$.apply(Try.scala:161)
>>     at
>> akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>     at
>> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>     at
>>
>> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>     at
>>
>> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>     at
>>
>> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>     at
>> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>     at
>> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>     at
>>
>> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
>> removed): remote Akka client disassociated
>> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
>> killed. Reason: Master removed our application: FAILED
>> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
>> cluster scheduler: Master removed our application: FAILED
>>
>>
>> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>> > Bingo, works off master now; thanks Dmitriy.
>> >
>> > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> > wrote:
>> >
>> >> ok spark 1.2 is mirrored now.
>> >> and master should be also ok (back to 1.1)
>> >>
>> >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>> >> andrew.musselman@gmail.com> wrote:
>> >>
>> >> > I reset hard to the previous commit, created a branch, and pushed it.
>> >> >
>> >> > I got email confirmation like so:
>> >> > Repository: mahout
>> >> > Updated Branches:
>> >> >   refs/heads/spark-1.2 [created] 901ef03b4
>> >> >
>> >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <
>> dlieu.7@gmail.com>
>> >> > wrote:
>> >> >
>> >> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>> >> > > it should be there eventually, give it a bit of time.
>> >> > >
>> >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <
>> dlieu.7@gmail.com
>> >> >
>> >> > > wrote:
>> >> > >
>> >> > > > what exactly did you try to do?
>> >> > > >
>> >> > > > just resetting HEAD will not work on remote branch -- you need
>> >> > > > force-sync that (with +) since it is a history-rewriting push,
>> >> > > > but asf git does not allow that.
>> >> > > >
>> >> > > > ASF will mirror ALL branches afaik. I think i've done it before.
>> >> > > > so if you create a new one it should (eventually) get there.
>> >> > > >
>> >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>> >> > > > andrew.musselman@gmail.com> wrote:
>> >> > > >
>> >> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>> >> > > >> don't see it there yet.
>> >> > > >>
>> >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>> >> dlieu.7@gmail.com
>> >> > >
>> >> > > >> wrote:
>> >> > > >>
>> >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>> >> pat@occamsmachete.com
>> >> > >
>> >> > > >> > wrote:
>> >> > > >> >
>> >> > > >> > > to be safe I'd "git reset --hard xyz" to the commit previous to
>> >> > > >> > > the 1.2.1
>> >> > > >> > >
>> >> > > >> >
>> >> > > >> > As i just explained, that resets are not possible with ASF git.
>> >> > > >> > Reverting is the only option.
>> >> > > >> >
>> >> > > >> > -d
>> >> > > >> >
>> >> > > >>
>> >> > > >
>> >> > > >
>> >> > >
>> >> >
>> >>
>> >
>> >
>>
>
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
seems like different spark builds on the client and the backend.

the shell is using your local spark setup (pointed to by SPARK_HOME). make
sure it points at binaries identical to what the backend is running, not
just the same spark version.
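
for example, a quick sanity check (the path below is just the one from the
log above; adjust to your layout) is to compare the assembly jar the shell
picks up with the one each worker host actually runs:

    # on the machine running bin/mahout spark-shell
    echo $SPARK_HOME
    md5sum "$SPARK_HOME"/assembly/target/scala-2.10/spark-assembly-*.jar

    # repeat on every worker host, against its own spark install
    md5sum "$SPARK_HOME"/assembly/target/scala-2.10/spark-assembly-*.jar

identical checksums everywhere is the easiest way to be sure the binaries
really are the same, and not just the same release number.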

the reason is that spark is not binary-canonical w.r.t. release version: a
given release still implies custom builds, and possibly different jvm
bytecode levels, hadoop dependencies, etc.
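
if the backend was built separately, one way to get there is to build spark
once and run every role from that same tree, roughly like this (the maven
flags match the hadoop 1.1.1 assembly in the log; "worker1" is just a
placeholder host):

    # build a single assembly from one spark checkout
    cd ~/spark
    mvn -Dhadoop.version=1.1.1 -DskipTests clean package

    # push the identical tree to each worker, then restart the standalone
    # daemons from the master
    rsync -a --delete ~/spark/ worker1:~/spark/
    ~/spark/sbin/stop-all.sh && ~/spark/sbin/start-all.sh

then export SPARK_HOME=~/spark everywhere before launching
bin/mahout spark-shell.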

On Tue, Feb 24, 2015 at 12:59 PM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> Except after some time it blew up:
>
> $ bin/mahout spark-shell
> SLF4J: Class path contains multiple SLF4J bindings.
> SLF4J: Found binding in
>
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
>
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: Found binding in
>
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.
> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>
>                          _                 _
>          _ __ ___   __ _| |__   ___  _   _| |_
>         | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
>         | | | | | | (_| | | | | (_) | |_| | |_
>         |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0
>
>
> Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
> 1.7.0_03)
> Type in expressions to have them evaluated.
> Type :help for more information.
> 15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
> address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
> 15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> another address
> 15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
> library for your platform... using builtin-java classes where applicable
> Created spark context..
> Mahout distributed context is available as "implicit val sdc".
> mahout> 15/02/24 12:24:17 ERROR Remoting:
> org.apache.spark.storage.BlockManagerId; local class incompatible: stream
> classdesc serialVersionUID = 1677335532749418220, local class
> serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:24:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already
> removed): remote Akka client disassociated
> 15/02/24 12:26:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:26:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already
> removed): remote Akka client disassociated
> 15/02/24 12:27:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:28:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already
> removed): remote Akka client disassociated
> 15/02/24 12:29:26 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:29:59 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already
> removed): remote Akka client disassociated
> 15/02/24 12:31:09 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
> local class incompatible: stream classdesc serialVersionUID =
> 1677335532749418220, local class serialVersionUID = -7366074099953117729
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at
> akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>     at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>     at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>     at
>
> akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>     at scala.util.Try$.apply(Try.scala:161)
>     at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>     at
> akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>     at
>
> akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>     at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>     at
>
> akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>     at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>     at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>     at
>
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>     at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>     at
>
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>     at
> scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>     at
>
> scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> [the identical java.io.InvalidClassException stack trace for
> org.apache.spark.storage.BlockManagerId is logged again at 12:31:42, 12:32:51,
> 12:33:24, 12:34:34, 12:35:07, 12:36:17, 12:36:50, 12:38:00, 12:38:33, 12:39:43,
> and 12:40:16; only the scheduler errors, which differ, are kept here]
> 15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already removed): remote Akka client disassociated
> 15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already removed): remote Akka client disassociated
> 15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already removed): remote Akka client disassociated
> 15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already removed): remote Akka client disassociated
> 15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already removed): remote Akka client disassociated
> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already removed): remote Akka client disassociated
> 15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been killed. Reason: Master removed our application: FAILED
> 15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from cluster scheduler: Master removed our application: FAILED
>
>
> On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
> > Bingo, works off master now; thanks Dmitriy.
> >
> > On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > wrote:
> >
> >> ok spark 1.2 is mirrored now.
> >> and master should be also ok (back to 1.1)
> >>
> >> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
> >> andrew.musselman@gmail.com> wrote:
> >>
> >> > I reset hard to the previous commit, created a branch, and pushed it.
> >> >
> >> > I got email confirmation like so:
> >> > Repository: mahout
> >> > Updated Branches:
> >> >   refs/heads/spark-1.2 [created] 901ef03b4
> >> >
> >> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
> >
> >> > wrote:
> >> >
> >> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github. it
> >> > > should be there eventually, give it a bit of time.
> >> > >
> >> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <
> dlieu.7@gmail.com
> >> >
> >> > > wrote:
> >> > >
> >> > > > what exactly did you try to do?
> >> > > >
> >> > > > just resetting HEAD will not work on remote branch -- you need to
> >> > > > force-push that (with +) since it is a history-rewriting push, but asf
> >> > > > git does not allow that.
> >> > > >
> >> > > > ASF will mirror ALL branches afaik. I think i've done it before. so if
> >> > > > you create a new one it should (eventually) get there.
> >> > > >
> >> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> >> > > > andrew.musselman@gmail.com> wrote:
> >> > > >
> >> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
> >> > > >> don't see it there yet.
> >> > > >>
> >> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
> >> dlieu.7@gmail.com
> >> > >
> >> > > >> wrote:
> >> > > >>
> >> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
> >> pat@occamsmachete.com
> >> > >
> >> > > >> > wrote:
> >> > > >> >
> >> > > >> > to be safe I’d “git reset --hard xyz” to the commit previous to the
> >> > > >> > 1.2.1
> >> > > >> > >
> >> > > >> >
> >> > > >> > As i just explained, resets are not possible with ASF git.
> >> > > >> > Reverting is the only option.
> >> > > >> >
> >> > > >> > -d
> >> > > >> >
> >> > > >>
> >> > > >
> >> > > >
> >> > >
> >> >
> >>
> >
> >
>
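
To make the constraint above concrete: the ASF remote rejects non-fast-forward
(history-rewriting) pushes, so a bad commit on a published branch has to be
backed out with a revert rather than a reset. A rough sketch, with placeholder
commit ids rather than the actual Mahout hashes:

$ git reset --hard <last-good-commit>   # rewrites local history
$ git push origin +master               # '+' forces the push; the ASF remote refuses it

$ git revert <bad-commit>               # adds a new commit that undoes the change
$ git push origin master                # ordinary fast-forward push, which is accepted
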
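The InvalidClassException in the logs (stream classdesc serialVersionUID vs.
local class serialVersionUID for org.apache.spark.storage.BlockManagerId) is
the usual symptom of the driver and the executors running two different Spark
builds, i.e. a shell compiled against one Spark version talking to a cluster
that runs another. A quick sanity check, assuming a standalone cluster built
from source as in the paths above (the worker hostname is a placeholder):

$ ls ~/spark/assembly/target/scala-2.10/spark-assembly-*.jar
$ ssh <worker-host> 'ls ~/spark/assembly/target/scala-2.10/spark-assembly-*.jar'

If the two assemblies differ, rebuilding Mahout against the Spark version the
cluster actually runs (or upgrading the cluster to match) should make the
serialVersionUID mismatch go away.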

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Except after some time it blew up:

$ bin/mahout spark-shell
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]

                         _                 _
         _ __ ___   __ _| |__   ___  _   _| |_
        | '_ ` _ \ / _` | '_ \ / _ \| | | | __|
        | | | | | | (_| | | | | (_) | |_| | |_
        |_| |_| |_|\__,_|_| |_|\___/ \__,_|\__|  version 1.0


Using Scala version 2.10.0 (Java HotSpot(TM) 64-Bit Server VM, Java
1.7.0_03)
Type in expressions to have them evaluated.
Type :help for more information.
15/02/24 12:23:26 WARN Utils: Your hostname, ubuntu resolves to a loopback
address: 127.0.1.1; using 5.5.8.1 instead (on interface as0t1)
15/02/24 12:23:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
another address
15/02/24 12:23:39 WARN NativeCodeLoader: Unable to load native-hadoop
library for your platform... using builtin-java classes where applicable
Created spark context..
Mahout distributed context is available as "implicit val sdc".
mahout> 15/02/24 12:24:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId; local class incompatible: stream classdesc serialVersionUID = 1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
[the identical java.io.InvalidClassException stack trace for
org.apache.spark.storage.BlockManagerId is logged again at 12:24:50, 12:26:00,
12:26:33, 12:27:43, 12:28:16, 12:29:26, 12:29:59, and 12:31:09; only the
scheduler errors, which differ, are kept here]
15/02/24 12:25:23 ERROR TaskSchedulerImpl: Lost an executor 0 (already removed): remote Akka client disassociated
15/02/24 12:27:06 ERROR TaskSchedulerImpl: Lost an executor 1 (already removed): remote Akka client disassociated
15/02/24 12:28:49 ERROR TaskSchedulerImpl: Lost an executor 2 (already removed): remote Akka client disassociated
15/02/24 12:30:32 ERROR TaskSchedulerImpl: Lost an executor 3 (already removed): remote Akka client disassociated
15/02/24 12:31:42 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:32:15 ERROR TaskSchedulerImpl: Lost an executor 4 (already
removed): remote Akka client disassociated
15/02/24 12:32:51 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:33:24 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:33:58 ERROR TaskSchedulerImpl: Lost an executor 5 (already
removed): remote Akka client disassociated
15/02/24 12:34:34 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:35:07 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:35:41 ERROR TaskSchedulerImpl: Lost an executor 6 (already
removed): remote Akka client disassociated
15/02/24 12:36:17 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:36:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:37:23 ERROR TaskSchedulerImpl: Lost an executor 7 (already
removed): remote Akka client disassociated
15/02/24 12:38:00 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:38:33 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:39:06 ERROR TaskSchedulerImpl: Lost an executor 8 (already
removed): remote Akka client disassociated
15/02/24 12:39:43 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:40:16 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
local class incompatible: stream classdesc serialVersionUID =
1677335532749418220, local class serialVersionUID = -7366074099953117729
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
    at
java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
    at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
    at
akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
    at
akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
    at scala.util.Try$.apply(Try.scala:161)
    at akka.serialization.Serialization.deserialize(Serialization.scala:98)
    at
akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
    at
akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
    at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
    at
akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
15/02/24 12:40:49 ERROR TaskSchedulerImpl: Lost an executor 9 (already
removed): remote Akka client disassociated
15/02/24 12:40:49 ERROR SparkDeploySchedulerBackend: Application has been
killed. Reason: Master removed our application: FAILED
15/02/24 12:40:49 ERROR TaskSchedulerImpl: Exiting due to error from
cluster scheduler: Master removed our application: FAILED


On Tue, Feb 24, 2015 at 12:24 PM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> Bingo, works off master now; thanks Dmitriy.
>
> On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
>> ok spark 1.2 is mirrored now.
>> and master should be also ok (back to 1.1)
>>
>> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>> > I reset hard to the previous commit, created a branch, and pushed it.
>> >
>> > I got email confirmation like so:
>> > Repository: mahout
>> > Updated Branches:
>> >   refs/heads/spark-1.2 [created] 901ef03b4
>> >
>> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> > wrote:
>> >
>> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
>> iti
>> > > should be there eventually, give it a bit of time.
>> > >
>> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
>> >
>> > > wrote:
>> > >
>> > > > what exactly did you try to do?
>> > > >
>> > > > just resetting HEAD will not work on remote branch -- you need
>> > force-sync
>> > > > that (with +) since it is a history-rewriting push, but asf git does
>> > not
>> > > > allow that.
>> > > >
>> > > > ASF will mirror ALL branches afaik. I think i've done it before. so
>> if
>> > > you
>> > > > create a new one it should (eventually) get there.
>> > > >
>> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>> > > > andrew.musselman@gmail.com> wrote:
>> > > >
>> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
>> > don't
>> > > >> see it there yet.
>> > > >>
>> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
>> dlieu.7@gmail.com
>> > >
>> > > >> wrote:
>> > > >>
>> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
>> pat@occamsmachete.com
>> > >
>> > > >> > wrote:
>> > > >> >
>> > > >> > > to be safe I’d “git reset —hard xyz” to the commit previous to
>> the
>> > > >> 1.2.1
>> > > >> > >
>> > > >> >
>> > > >> > As i just explained, that resets are not possible with ASF git.
>> > > >> Reverting
>> > > >> > is the only option.
>> > > >> >
>> > > >> > -d
>> > > >> >
>> > > >>
>> > > >
>> > > >
>> > >
>> >
>>
>
>

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Bingo, works off master now; thanks Dmitriy.

On Tue, Feb 24, 2015 at 11:54 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> ok spark 1.2 is mirrored now.
> and master should be also ok (back to 1.1)
>
> On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
> > I reset hard to the previous commit, created a branch, and pushed it.
> >
> > I got email confirmation like so:
> > Repository: mahout
> > Updated Branches:
> >   refs/heads/spark-1.2 [created] 901ef03b4
> >
> > On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > wrote:
> >
> > > yeah ok so you pushed 1.2 branch to asf but it is not yet in github.
> iti
> > > should be there eventually, give it a bit of time.
> > >
> > > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > > wrote:
> > >
> > > > what exactly did you try to do?
> > > >
> > > > just resetting HEAD will not work on remote branch -- you need
> > force-sync
> > > > that (with +) since it is a history-rewriting push, but asf git does
> > not
> > > > allow that.
> > > >
> > > > ASF will mirror ALL branches afaik. I think i've done it before. so
> if
> > > you
> > > > create a new one it should (eventually) get there.
> > > >
> > > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> > > > andrew.musselman@gmail.com> wrote:
> > > >
> > > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
> > don't
> > > >> see it there yet.
> > > >>
> > > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <
> dlieu.7@gmail.com
> > >
> > > >> wrote:
> > > >>
> > > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <
> pat@occamsmachete.com
> > >
> > > >> > wrote:
> > > >> >
> > > >> > > to be safe I’d “git reset —hard xyz” to the commit previous to
> the
> > > >> 1.2.1
> > > >> > >
> > > >> >
> > > >> > As i just explained, that resets are not possible with ASF git.
> > > >> Reverting
> > > >> > is the only option.
> > > >> >
> > > >> > -d
> > > >> >
> > > >>
> > > >
> > > >
> > >
> >
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
ok spark 1.2 is mirrored now.
and master should also be ok (back to 1.1)

On Tue, Feb 24, 2015 at 11:53 AM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> I reset hard to the previous commit, created a branch, and pushed it.
>
> I got email confirmation like so:
> Repository: mahout
> Updated Branches:
>   refs/heads/spark-1.2 [created] 901ef03b4
>
> On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
> > yeah ok so you pushed 1.2 branch to asf but it is not yet in github. iti
> > should be there eventually, give it a bit of time.
> >
> > On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
> > wrote:
> >
> > > what exactly did you try to do?
> > >
> > > just resetting HEAD will not work on remote branch -- you need
> force-sync
> > > that (with +) since it is a history-rewriting push, but asf git does
> not
> > > allow that.
> > >
> > > ASF will mirror ALL branches afaik. I think i've done it before. so if
> > you
> > > create a new one it should (eventually) get there.
> > >
> > > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> > > andrew.musselman@gmail.com> wrote:
> > >
> > >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and
> don't
> > >> see it there yet.
> > >>
> > >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dlieu.7@gmail.com
> >
> > >> wrote:
> > >>
> > >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pat@occamsmachete.com
> >
> > >> > wrote:
> > >> >
> > >> > > to be safe I’d “git reset —hard xyz” to the commit previous to the
> > >> 1.2.1
> > >> > >
> > >> >
> > >> > As i just explained, that resets are not possible with ASF git.
> > >> Reverting
> > >> > is the only option.
> > >> >
> > >> > -d
> > >> >
> > >>
> > >
> > >
> >
>

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
I reset hard to the previous commit, created a branch, and pushed it.

I got email confirmation like so:
Repository: mahout
Updated Branches:
  refs/heads/spark-1.2 [created] 901ef03b4
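
(For reference, a minimal sketch of that sequence as described; <previous-commit> is a
placeholder, since the actual hash isn't spelled out in this thread:)

$ git reset --hard <previous-commit>   # move the local checkout back to the commit before the 1.2.1 change
$ git checkout -b spark-1.2            # create the new branch at that point
$ git push origin spark-1.2            # plain push of a new branch, which ASF git accepts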

On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. iti
> should be there eventually, give it a bit of time.
>
> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
> > what exactly did you try to do?
> >
> > just resetting HEAD will not work on remote branch -- you need force-sync
> > that (with +) since it is a history-rewriting push, but asf git does not
> > allow that.
> >
> > ASF will mirror ALL branches afaik. I think i've done it before. so if
> you
> > create a new one it should (eventually) get there.
> >
> > On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> > andrew.musselman@gmail.com> wrote:
> >
> >> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
> >> see it there yet.
> >>
> >> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
> >> wrote:
> >>
> >> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
> >> > wrote:
> >> >
> >> > > to be safe I’d “git reset —hard xyz” to the commit previous to the
> >> 1.2.1
> >> > >
> >> >
> >> > As i just explained, that resets are not possible with ASF git.
> >> Reverting
> >> > is the only option.
> >> >
> >> > -d
> >> >
> >>
> >
> >
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
Issued a revert to HEAD.
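
(A minimal sketch of what such a revert looks like; the hash is a placeholder, and this
assumes the 1.2.1 change landed as a single ordinary commit:)

$ git revert <sha-of-1.2.1-commit>   # new commit that undoes the change without rewriting history
$ git push origin master             # an ordinary, non-history-rewriting push, so ASF git accepts it

If the change had landed as a merge commit, git revert -m 1 <merge-sha> would be needed instead.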

On Tue, Feb 24, 2015 at 11:47 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> yeah ok so you pushed 1.2 branch to asf but it is not yet in github. iti
> should be there eventually, give it a bit of time.
>
> On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
>> what exactly did you try to do?
>>
>> just resetting HEAD will not work on remote branch -- you need force-sync
>> that (with +) since it is a history-rewriting push, but asf git does not
>> allow that.
>>
>> ASF will mirror ALL branches afaik. I think i've done it before. so if
>> you create a new one it should (eventually) get there.
>>
>> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
>> andrew.musselman@gmail.com> wrote:
>>
>>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
>>> see it there yet.
>>>
>>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
>>> wrote:
>>>
>>> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
>>> > wrote:
>>> >
>>> > > to be safe I’d “git reset —hard xyz” to the commit previous to the
>>> 1.2.1
>>> > >
>>> >
>>> > As i just explained, that resets are not possible with ASF git.
>>> Reverting
>>> > is the only option.
>>> >
>>> > -d
>>> >
>>>
>>
>>
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
yeah ok so you pushed the 1.2 branch to asf but it is not yet in github. it
should be there eventually; give it a bit of time.

On Tue, Feb 24, 2015 at 11:35 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> what exactly did you try to do?
>
> just resetting HEAD will not work on remote branch -- you need force-sync
> that (with +) since it is a history-rewriting push, but asf git does not
> allow that.
>
> ASF will mirror ALL branches afaik. I think i've done it before. so if you
> create a new one it should (eventually) get there.
>
> On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
> andrew.musselman@gmail.com> wrote:
>
>> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
>> see it there yet.
>>
>> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
>> wrote:
>>
>> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
>> > wrote:
>> >
>> > > to be safe I’d “git reset —hard xyz” to the commit previous to the
>> 1.2.1
>> > >
>> >
>> > As i just explained, that resets are not possible with ASF git.
>> Reverting
>> > is the only option.
>> >
>> > -d
>> >
>>
>
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
What exactly did you try to do?

Just resetting HEAD will not work on a remote branch -- you need to force-sync
that (with +) since it is a history-rewriting push, but ASF git does not
allow that.

ASF will mirror ALL branches afaik. I think I've done it before, so if you
create a new one it should (eventually) get there.
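
(To make the distinction concrete -- the branch names below are just placeholders:)

$ git push origin +master        # forced update of an existing branch (history rewrite); rejected by ASF git
$ git push origin spark-1.2      # ordinary push of a new branch; accepted, and mirrored to GitHub after a delay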

On Tue, Feb 24, 2015 at 11:18 AM, Andrew Musselman <
andrew.musselman@gmail.com> wrote:

> Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
> see it there yet.
>
> On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
> > On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
> > wrote:
> >
> > > to be safe I’d “git reset —hard xyz” to the commit previous to the
> 1.2.1
> > >
> >
> > As i just explained, that resets are not possible with ASF git. Reverting
> > is the only option.
> >
> > -d
> >
>

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Does ASF git get mirrored to GitHub?  I tried pushing a branch and don't
see it there yet.

On Tue, Feb 24, 2015 at 11:16 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com>
> wrote:
>
> > to be safe I’d “git reset —hard xyz” to the commit previous to the 1.2.1
> >
>
> As i just explained, that resets are not possible with ASF git. Reverting
> is the only option.
>
> -d
>

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
On Tue, Feb 24, 2015 at 10:55 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

> to be safe I’d “git reset —hard xyz” to the commit previous to the 1.2.1
>

As I just explained, such resets are not possible with ASF git. Reverting
is the only option.

-d

Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
To be safe I'd "git reset --hard xyz" to the commit previous to the 1.2.1 change.

I merged a big commit with this and upgraded my cluster to 1.2.1, so I will stick with this for a bit.

If anyone has a clue, please speak up. It seems related to starting a context. The error in spark-itemsimilarity is much simpler than the shell one:

15/02/24 10:17:57 INFO spark.SecurityManager: Changing view acls to: pat,
15/02/24 10:17:57 INFO spark.SecurityManager: Changing modify acls to: pat,
15/02/24 10:17:57 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(pat, ); users with modify permissions: Set(pat, )
Exception in thread "main" com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.event-handlers'
    at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
    at com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
    at com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
    at com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
    at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
    at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1446)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1442)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:153)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:203)
    at org.apache.mahout.sparkbindings.package$.mahoutSparkContext(package.scala:95)
    at org.apache.mahout.drivers.MahoutSparkDriver.start(MahoutSparkDriver.scala:81)
    at org.apache.mahout.drivers.ItemSimilarityDriver$.start(ItemSimilarityDriver.scala:118)


On Feb 24, 2015, at 10:48 AM, Andrew Musselman <an...@gmail.com> wrote:

Roll back meaning just the entry in the pom?

On Tue, Feb 24, 2015 at 10:31 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

> 1.2? I thought the previous version was Spark 1.1.0?
> 
> I need 1.2  so I’m up for trying to fix this. It was a contribution, maybe
> the originator has a clue.
> 
> BTW can’t run spark-itemsimilarity on the cluster either though all unit
> tests pass and the cluster seems to be working with their shell and
> examples.
> 
> I get:
> 
> Exception in thread "main" com.typesafe.config.ConfigException$Missing: No
> configuration setting found for key 'akka.event-handlers'
>        at
> com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
>        at
> com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
>        at
> com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
>        at
> com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
>        at
> com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
>        at
> com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
>        at
> com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
>        at
> com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
>        at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
>        at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
>        ...
> 
> On Feb 24, 2015, at 10:22 AM, Dmitriy Lyubimov <dl...@gmail.com> wrote:
> 
> As a remedy, i'd suggest to branch out spark 1.2 work and rollback 1.2.1
> commit on master until 1.2 branch is fixed.
> 
> On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
> 
>> oops.
>> 
>> tests dont test shell startup.
>> 
>> apparently stuff got out of sync with 1.2
>> 
>> On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
>> wrote:
>> 
>>> Me too and I built with 1.2.1
>>> 
>>> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <
> andrew.musselman@gmail.com>
>>> wrote:
>>> 
>>> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting this
>>> error when I try out the spark-shell; am I missing something?
>>> 
>>> $ bin/mahout spark-shell
>>> SLF4J: Class path contains multiple SLF4J bindings.
>>> SLF4J: Found binding in
>>> 
>>> 
> [jar:file:/home/akm/mahout/mrlegacy/target/mahout-mrlegacy-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> 
>>> 
> [jar:file:/home/akm/mahout/spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: Found binding in
>>> 
>>> 
> [jar:file:/home/akm/spark/assembly/target/scala-2.10/spark-assembly-1.2.1-hadoop1.1.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
>>> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
>>> explanation.
>>> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
>>> error:
>>>   while compiling: <init>
>>>      during phase: typer
>>>   library version: version 2.10.4
>>>  compiler version: version 2.10.0
>>> reconstructed args:
>>> 
>>> last tree to typer: Literal(Constant(()))
>>>            symbol: null
>>> symbol definition: null
>>>               tpe: Unit
>>>     symbol owners:
>>>    context owners: package <empty>
>>> 
>>> == Enclosing template or block ==
>>> 
>>> Block( // tree.tpe=Unit
>>> {}
>>> ()
>>> )
>>> 
>>> == Expanded type of tree ==
>>> 
>>> TypeRef(TypeSymbol(final abstract class Unit extends AnyVal))
>>> 
>>> uncaught exception during compilation: java.lang.NoSuchMethodError
>>> 
>>> Failed to initialize compiler: NoSuchMethodError.
>>> This is most often remedied by a full clean and recompile.
>>> Otherwise, your classpath may continue bytecode compiled by
>>> different and incompatible versions of scala.
>>> 
>>> java.lang.NoSuchMethodError:
>>> 
>>> 
> scala.reflect.internal.TreeInfo.firstArgument(Lscala/reflect/internal/Trees$Tree;)Lscala/reflect/internal/Trees$Tree;
>>>  at
>>> scala.tools.nsc.typechecker.Typers$Typer.parentTypes(Typers.scala:1550)
>>>  at
>>> scala.tools.nsc.typechecker.Namers$Namer.templateSig(Namers.scala:861)
>>>  at scala.tools.nsc.typechecker.Namers$Namer.classSig(Namers.scala:907)
>>>  at
> scala.tools.nsc.typechecker.Namers$Namer.getSig$1(Namers.scala:1289)
>>>  at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1347)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply$mcV$sp(Namers.scala:709)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1$$anonfun$apply$1.apply(Namers.scala:708)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer.scala$tools$nsc$typechecker$Namers$Namer$$logAndValidate(Namers.scala:1385)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:708)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$Namer$$anonfun$monoTypeCompleter$1.apply(Namers.scala:707)
>>>  at
>>> 
> scala.tools.nsc.typechecker.Namers$$anon$1.completeImpl(Namers.scala:1496)
>>>  at
>>> 
>>> 
> scala.tools.nsc.typechecker.Namers$LockingTypeCompleter$class.complete(Namers.scala:1504)
>>>  at
>>> scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1494)

Re: Spark shell broken

Posted by Andrew Musselman <an...@gmail.com>.
Roll back meaning just the Spark version entry in the pom?
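For illustration, rolling back just that entry could look roughly like the lines below; the <spark.version> property name and the commit id are placeholders rather than values taken from the actual pom or history.

# find the commit that bumped Spark to 1.2.1, then undo it or hand-edit the property back
$ git log --oneline -- pom.xml
$ git revert <sha-of-the-1.2.1-bump>        # or set <spark.version> back to the previous value by hand
$ mvn clean install -DskipTests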

On Tue, Feb 24, 2015 at 10:31 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

> 1.2? I thought the previous version was Spark 1.1.0?
>
> I need 1.2  so I’m up for trying to fix this. It was a contribution, maybe
> the originator has a clue.
>
> BTW can’t run spark-itemsimilarity on the cluster either though all unit
> tests pass and the cluster seems to be working with their shell and
> examples.
>
> I get:
>
> Exception in thread "main" com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.event-handlers'
>         ...
>
> On Feb 24, 2015, at 10:22 AM, Dmitriy Lyubimov <dl...@gmail.com> wrote:
>
> As a remedy, i'd suggest to branch out spark 1.2 work and rollback 1.2.1
> commit on master until 1.2 branch is fixed.
>
> On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
> wrote:
>
> > oops.
> >
> > tests dont test shell startup.
> >
> > apparently stuff got out of sync with 1.2
> >
> > On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
> > wrote:
> >
> >> Me too and I built with 1.2.1
> >>
> >> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <andrew.musselman@gmail.com> wrote:
> >>
> >> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting this
> >> error when I try out the spark-shell; am I missing something?

Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
1.2? I thought the previous version was Spark 1.1.0?

I need 1.2, so I'm up for trying to fix this. It was a contribution; maybe the originator has a clue.
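Since the shell failure reports library version 2.10.4 against compiler version 2.10.0, one possible first check is which org.scala-lang artifacts each build actually resolves. A rough sketch only; the Spark pom property name is an assumption:

# run in the mahout checkout: list the scala-library / scala-compiler / scala-reflect versions Maven resolves
$ mvn dependency:tree -Dincludes=org.scala-lang
# run in the spark checkout: see which Scala version the assembly was built against,
# assuming Spark's root pom exposes a scala.version property
$ grep -m1 '<scala.version>' pom.xml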

BTW, I can't run spark-itemsimilarity on the cluster either, though all unit tests pass and the cluster seems to be working with Spark's own shell and examples.

I get:

Exception in thread "main" com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.event-handlers'
	at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:115)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:136)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:142)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:150)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:155)
	at com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:203)
	at com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:260)
	at com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:318)
	at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:150)
	at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:470)
        ...
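A missing 'akka.event-handlers' key is often a sign of mixed Akka versions on the classpath: an ActorSystem from one Akka release reading a reference.conf from another, since that key was renamed between releases. One hypothetical way to check what the spark module's job jar actually ships (path relative to the mahout checkout; the jar may not bundle Akka or a reference.conf at all):

# does the bundled reference.conf define the old or the new logging key, if it exists?
$ unzip -p spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar reference.conf | grep -nE 'event-handlers|loggers'
# which akka ActorSystem classes made it into the assembly?
$ unzip -l spark/target/mahout-spark_2.10-1.0-SNAPSHOT-job.jar | grep 'akka/actor/ActorSystem'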

On Feb 24, 2015, at 10:22 AM, Dmitriy Lyubimov <dl...@gmail.com> wrote:

As a remedy, i'd suggest to branch out spark 1.2 work and rollback 1.2.1
commit on master until 1.2 branch is fixed.

On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> oops.
> 
> tests dont test shell startup.
> 
> apparently stuff got out of sync with 1.2
> 
> On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
> wrote:
> 
>> Me too and I built with 1.2.1
>> 
>> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <an...@gmail.com>
>> wrote:
>> 
>> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting this
>> error when I try out the spark-shell; am I missing something?


Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
As a remedy, I'd suggest branching out the Spark 1.2 work and rolling back the 1.2.1 commit on master until the 1.2 branch is fixed.
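A sketch of that workflow, with a hypothetical branch name and a placeholder commit id:

# keep the Spark 1.2 work on its own branch, then roll master back
$ git checkout -b spark-1.2 master
$ git checkout master
$ git revert <sha-of-the-1.2.1-upgrade-commit>
$ git push origin spark-1.2 master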

On Tue, Feb 24, 2015 at 10:19 AM, Dmitriy Lyubimov <dl...@gmail.com>
wrote:

> oops.
>
> tests dont test shell startup.
>
> apparently stuff got out of sync with 1.2
>
> On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com>
> wrote:
>
>> Me too and I built with 1.2.1
>>
>> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <an...@gmail.com>
>> wrote:
>>
>> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting this
>> error when I try out the spark-shell; am I missing something?

Re: Spark shell broken

Posted by Dmitriy Lyubimov <dl...@gmail.com>.
Oops.

Tests don't cover shell startup.

Apparently stuff got out of sync with 1.2.
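A minimal smoke test along these lines could catch shell-startup breakage; this is only a sketch, not something in the current build:

# start the shell non-interactively, quit immediately, and report whether it exited cleanly
$ echo ':quit' | bin/mahout spark-shell && echo "shell starts OK" || echo "shell startup broken"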

On Tue, Feb 24, 2015 at 10:02 AM, Pat Ferrel <pa...@occamsmachete.com> wrote:

> Me too and I built with 1.2.1
>
> On Feb 24, 2015, at 9:50 AM, Andrew Musselman <an...@gmail.com>
> wrote:
>
> I've just rebuild mahout master and spark v1.2.1-rc2 and am getting this
> error when I try out the spark-shell; am I missing something?
>    at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1548)
>    at scala.tools.nsc.Global$Run.compileSources(Global.scala:1544)
>    at
>
> org.apache.spark.repl.SparkIMain.compileSourcesKeepingRun(SparkIMain.scala:528)
>    at
>
> org.apache.spark.repl.SparkIMain$ReadEvalPrint.compileAndSaveRun(SparkIMain.scala:923)
>    at
>
> org.apache.spark.repl.SparkIMain$ReadEvalPrint.compile(SparkIMain.scala:879)
>    at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:719)
>    at org.apache.spark.repl.SparkIMain.bind(SparkIMain.scala:762)
>    at
>
> org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
>    at
>
> org.apache.spark.repl.SparkIMain$$anonfun$quietBind$1.apply(SparkIMain.scala:761)
>    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:270)
>    at org.apache.spark.repl.SparkIMain.quietBind(SparkIMain.scala:761)
>    at
>
> org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$apply$mcZ$sp$2.apply$mcV$sp(SparkILoop.scala:935)
>    at
>
> org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:147)
>    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:60)
>    at
>
> org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:106)
>    at
>
> org.apache.mahout.sparkbindings.shell.MahoutSparkILoop.postInitialization(MahoutSparkILoop.scala:24)
>    at
>
> org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:962)
>    at
>
> org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
>    at
>
> org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
>    at
>
> scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
>    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
>    at org.apache.mahout.sparkbindings.shell.Main$.main(Main.scala:39)
>    at org.apache.mahout.sparkbindings.shell.Main.main(Main.scala)
>
>

Re: Spark shell broken

Posted by Pat Ferrel <pa...@occamsmachete.com>.
Me too, and I built with 1.2.1.
