You are viewing a plain text version of this content. The canonical link for it is here.
Posted to users@groovy.apache.org by tog <gu...@gmail.com> on 2015/11/15 17:52:54 UTC
GroovyShell
Hi
I have a behavior I don't understand when using GroovyShell.
Here is a script <https://gist.github.com/galleon/231dbfcff36f8d4ce6c2>
that is working fine when I use it from the command line
When I use it from a second script
<https://gist.github.com/galleon/e0807499a1b8b78924ca> using GroovyShell,
I got the following exception. Any idea why?
tog GroovySpark $ groovy GroovySparkThroughGroovyShell.groovy
java.lang.ClassNotFoundException:
org.apache.spark.rpc.akka.AkkaRpcEnvFactory
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
at
org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at
org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
at
org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
at
org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
at Script1.run(Script1.groovy:9)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
at
org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
at
GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
at
groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
at groovy.lang.GroovyShell.run(GroovyShell.java:524)
at groovy.lang.GroovyShell.run(GroovyShell.java:513)
at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
at groovy.ui.GroovyMain.run(GroovyMain.java:384)
at groovy.ui.GroovyMain.process(GroovyMain.java:370)
at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
at groovy.ui.GroovyMain.main(GroovyMain.java:109)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at
org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
org.apache.spark.SparkConf@7428de63
java.lang.ClassNotFoundException:
org.apache.spark.rpc.akka.AkkaRpcEnvFactory
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
at
org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at
org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
at
org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
at
org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
at Script1.run(Script1.groovy:9)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
at
org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
at
GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
at
groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
at groovy.lang.GroovyShell.run(GroovyShell.java:524)
at groovy.lang.GroovyShell.run(GroovyShell.java:513)
at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
at groovy.ui.GroovyMain.run(GroovyMain.java:384)
at groovy.ui.GroovyMain.process(GroovyMain.java:370)
at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
at groovy.ui.GroovyMain.main(GroovyMain.java:109)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at
org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Keegan Witt <ke...@gmail.com>.
Ah. Yes, I'm reproducing your issue now. Honestly, I don't know enough
about Grape to answer this. Groovysh and GroovyConsole both use
GroovyShell underneath and I don't see that they do any customization of
classloading/classpaths. At the moment, I'm stumped :(
-Keegan
On Wed, Nov 18, 2015 at 2:25 PM, tog <gu...@gmail.com> wrote:
> Well I was more thinking along the lines of asking Grab not to download the servlet-api
> that probably comes with Apache Spark.
>
> But I did rename the one in $GROOVY_HOME/lib and Thibault's branch fixed
> my exception regarding Akka. I nevertheless have a new issue which again
> seems related to the way classes are loaded.
>
> I really need to better understand what the differences are in running
> the very same script using the command line and GroovyShell ... there seem
> to be subtle differences that could explain that difference in behavior.
>
> Here is my new exception in case that rings a bell.
>
> tog GroovySpark $ $GROOVY_HOME/bin/groovy
> GroovySparkThroughGroovyShell.groovy
>
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0
> in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage
> 0.0 (TID 0, localhost): java.lang.ClassNotFoundException:
> Script1$_run_closure1
>
> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>
> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>
> at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>
> at java.lang.Class.forName0(Native Method)
>
> at java.lang.Class.forName(Class.java:348)
>
> at
> org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
>
>
> Cheers
>
> Guillaume
>
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Well I was more thinking along the lines of asking Grab not to download
the servlet-api
that probably comes with Apache Spark.
But I did rename the one in $GROOVY_HOME/lib and Thibault's branch fixed
my exception regarding Akka. I nevertheless have a new issue which again
seems related to the way classes are loaded.
I really need to better understand what the differences are in running the
very same script using the command line and GroovyShell ... there seem to
be subtle differences that could explain that difference in behavior.
Here is my new exception in case that rings a bell.
tog GroovySpark $ $GROOVY_HOME/bin/groovy
GroovySparkThroughGroovyShell.groovy
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0
in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage
0.0 (TID 0, localhost): java.lang.ClassNotFoundException:
Script1$_run_closure1
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at
org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
Cheers
Guillaume
Re: GroovyShell
Posted by Pascal Schumacher <pa...@gmx.net>.
The only way I know of is to delete/move the servlet-api-2.4.jar.
-Pascal
Am 18.11.2015 um 19:25 schrieb tog:
> I did recompile your groovysh-grab — is that really the right one? Because I
> face new problems such as:
>
> togGroovySpark $ $GROOVY_HOME/bin/groovy
> GroovySparkThroughGroovyShell.groovy
>
> java.lang.SecurityException: class
> "javax.servlet.FilterRegistration"'s signer information does not match
> signer information of other classes in the same package
>
> Probably multiple jars with the same name. How can I ask Grab not to
> load servlet-api-2.4.jar which ships with groovy ?
>
> Cheers
>
> Guillaume
>
>
> On 17 November 2015 at 21:24, Thibault Kruse <tibokruse@googlemail.com
> <ma...@googlemail.com>> wrote:
>
> you'd have to recompile, my PR is growing old
>
> On Tue, Nov 17, 2015 at 10:08 PM, tog <guillaume.alleon@gmail.com
> <ma...@gmail.com>> wrote:
> > Thibault
> >
> > Has your change been pushed in groovy recently or should I
> recompile my own
> > version to test if that solve my issue?
> > Any other way to test it without having to generate my own version ?
> >
> > Cheers
> > Guillaume
> >
> > On 17 November 2015 at 20:57, Thibault Kruse
> <tibokruse@googlemail.com <ma...@googlemail.com>>
> > wrote:
> >>
> >> Not sure if this is related at all. But I had an issue getting Grape
> >> imports available in Groovysh (which is related to Groovy Shell),
> >> which caused me to try and tamper with the Grape classloading:
> >>
> >>
> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
> >>
> >> This might be unrelated to your problems, though.
> >>
> >>
> >> On Tue, Nov 17, 2015 at 9:24 PM, tog
> <guillaume.alleon@gmail.com <ma...@gmail.com>>
> wrote:
> >> > Hello
> >> >
> >> > Any more ideas regarding my issue?
> >> >
> >> > Thanks
> >> > Guillaume
> >> >
> >> > On 15 November 2015 at 20:19, tog <guillaume.alleon@gmail.com
> <ma...@gmail.com>> wrote:
> >> >>
> >> >> Sorry, my previous email is wrong.
> >> >>
> >> >> The block:
> >> >> groovy.grape.Grape.grab(
> >> >> groupId: 'org.apache.spark',
> >> >> artifactId: 'spark-core_2.10',
> >> >> version: '1.5.2'
> >> >> )
> >> >>
> >> >> does not seem equivalent to:
> >> >>
> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
> >> >>
> >> >> since the imports cannot be found.
> >> >>
> >> >>
> >> >>
> >> >>
> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
> >> >>
> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
> >> >>
> >> >>
> org.codehaus.groovy.control.MultipleCompilationErrorsException:
> startup
> >> >> failed:
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9:
> unable to
> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> @ line 9, column 1.
> >> >>
> >> >> import org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8:
> unable to
> >> >> resolve class org.apache.spark.SparkConf
> >> >>
> >> >> @ line 8, column 1.
> >> >>
> >> >> import org.apache.spark.SparkConf
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> 2 errors
> >> >>
> >> >>
> >> >> On 15 November 2015 at 18:55, tog
> <guillaume.alleon@gmail.com <ma...@gmail.com>>
> wrote:
> >> >>>
> >> >>> Thanks, yes — I just realized the typo ... I fixed it and got
> the very
> >> >>> same
> >> >>> error.
> >> >>> I am getting lost ;-)
> >> >>>
> >> >>>
> >> >>>
> >> >>> org.apache.spark.SparkConf@2158ddec
> java.lang.ClassNotFoundException:
> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
> >> >>>
> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
> >> >>> java.lang.Class.forName0(Native Method) at
> >> >>> java.lang.Class.forName(Class.java:348) at
> >> >>>
> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
> >> >>>
> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
> >> >>>
> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
> >> >>> at
> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
> >> >>>
> >> >>>
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >> >>> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>> Method) at
> >> >>>
> >> >>>
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >> >>> at
> >> >>>
> >> >>>
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >> >>> at
> java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
> >> >>>
> >> >>>
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >> >>> at Script6.run(Script6.groovy:16)
> >> >>>
> >> >>>
> >> >>>
> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar
> <Bahman@bahmanm.com <ma...@bahmanm.com>>
> >> >>> wrote:
> >> >>>>
> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
> >> >>>>
> >> >>>> > @Grap seems to have default repo to look into ... with
> the change
> >> >>>> > you
> >> >>>> > are suggesting I got
> >> >>>> > ava.lang.RuntimeException: Error grabbing Grapes --
> [unresolved
> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not
> found] at
> >> >>>> >
> sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>>> > Method)
> >> >>>> >
> >> >>>> > How do I define them?
> >> >>>>
> >> >>>> It was a typo on my side. `artifactId` should be
> "spark-core_2.10"
> >> >>>> (note
> >> >>>> the `-` character).
> >> >>>>
> >> >>>> --
> >> >>>> Bahman Movaqar
> >> >>>>
> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com
> <http://keyserver2.pgp.com>)
> >> >>>>
> >> >>>
> >> >>>
> >> >>>
> >> >>> --
> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> <http://subkeys.pgp.net>
> >> >>
> >> >>
> >> >>
> >> >>
> >> >> --
> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> <http://subkeys.pgp.net>
> >> >
> >> >
> >> >
> >> >
> >> > --
> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> <http://subkeys.pgp.net>
> >
> >
> >
> >
> > --
> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net <http://subkeys.pgp.net>
>
>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net <http://subkeys.pgp.net>
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
I did recompile your groovysh-grab — is that really the right one? Because I
face new problems such as:
tog GroovySpark $ $GROOVY_HOME/bin/groovy
GroovySparkThroughGroovyShell.groovy
java.lang.SecurityException: class "javax.servlet.FilterRegistration"'s
signer information does not match signer information of other classes in
the same package
Probably multiple jars with the same name. How can I ask Grab not to
load the servlet-api-2.4.jar
which ships with Groovy?
Cheers
Guillaume
On 17 November 2015 at 21:24, Thibault Kruse <ti...@googlemail.com>
wrote:
> you'd have to recompile, my PR is growing old
>
> On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com> wrote:
> > Thibault
> >
> > Has your change been pushed in groovy recently or should I recompile my
> own
> > version to test if that solve my issue?
> > Any other way to test it without having to generate my own version ?
> >
> > Cheers
> > Guillaume
> >
> > On 17 November 2015 at 20:57, Thibault Kruse <ti...@googlemail.com>
> > wrote:
> >>
> >> Not sure if this is related at all. But I had an issue getting Grape
> >> imports available in Groovysh (which is related to Groovy Shell),
> >> which caused me to try and tamper with the Grape classloading:
> >>
> >>
> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
> >>
> >> This might be unrelated to your problems, though.
> >>
> >>
> >> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com>
> wrote:
> >> > Hello
> >> >
> >> > Any more ideas regarding my issue?
> >> >
> >> > Thanks
> >> > Guillaume
> >> >
> >> > On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
> >> >>
> >> >> Sorry, my previous email is wrong.
> >> >>
> >> >> The block:
> >> >> groovy.grape.Grape.grab(
> >> >> groupId: 'org.apache.spark',
> >> >> artifactId: 'spark-core_2.10',
> >> >> version: '1.5.2'
> >> >> )
> >> >>
> >> >> does not seem equivalent to:
> >> >>
> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
> >> >>
> >> >> since the imports cannot be found.
> >> >>
> >> >>
> >> >>
> >> >>
> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
> >> >>
> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
> >> >>
> >> >> org.codehaus.groovy.control.MultipleCompilationErrorsException:
> startup
> >> >> failed:
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> @ line 9, column 1.
> >> >>
> >> >> import org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
> >> >> resolve class org.apache.spark.SparkConf
> >> >>
> >> >> @ line 8, column 1.
> >> >>
> >> >> import org.apache.spark.SparkConf
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> 2 errors
> >> >>
> >> >>
> >> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com>
> wrote:
> >> >>>
> >> >>> Thanks, yes — I just realized the typo ... I fixed it and got the very
> >> >>> same
> >> >>> error.
> >> >>> I am getting lost ;-)
> >> >>>
> >> >>>
> >> >>>
> >> >>> org.apache.spark.SparkConf@2158ddec
> java.lang.ClassNotFoundException:
> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
> >> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
> >> >>> java.lang.Class.forName0(Native Method) at
> >> >>> java.lang.Class.forName(Class.java:348) at
> >> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
> >> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
> >> >>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
> >> >>> at
> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
> >> >>>
> >> >>>
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>> Method) at
> >> >>>
> >> >>>
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >> >>> at
> >> >>>
> >> >>>
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >> >>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> at
> >> >>>
> >> >>>
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >> >>> at Script6.run(Script6.groovy:16)
> >> >>>
> >> >>>
> >> >>>
> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
> >> >>> wrote:
> >> >>>>
> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
> >> >>>>
> >> >>>> > @Grap seems to have default repo to look into ... with the change
> >> >>>> > you
> >> >>>> > are suggesting I got
> >> >>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
> >> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>>> > Method)
> >> >>>> >
> >> >>>> > How do I define them?
> >> >>>>
> >> >>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
> >> >>>> (note
> >> >>>> the `-` character).
> >> >>>>
> >> >>>> --
> >> >>>> Bahman Movaqar
> >> >>>>
> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
> >> >>>>
> >> >>>
> >> >>>
> >> >>>
> >> >>> --
> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >> >>
> >> >>
> >> >>
> >> >>
> >> >> --
> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >> >
> >> >
> >> >
> >> >
> >> > --
> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >
> >
> >
> >
> > --
> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Hi Keegan
What you tried works for me as well (just one import).
Could you try with this:
new
GroovyShell().evaluate("@Grab('org.apache.spark:spark-core_2.10:1.5.2')\n" +
"import org.apache.spark.SparkConf\n" +
"import org.apache.spark.api.java.JavaSparkContext\n" +
"def conf = new
SparkConf().setMaster(\"local[2]\").setAppName(\"WordCount\")\n" +
"def ctx = new JavaSparkContext(conf)\n"
);
Thanks
Guillaume
On 18 November 2015 at 05:40, Keegan Witt <ke...@gmail.com> wrote:
> Ah, sorry, I misunderstood. I think the issue might be that you don't
> have Ivy on your classpath. This executes without exceptions for me
>
> Main.java
>
> import groovy.lang.GroovyShell;
> public class Main {
> public static void main(String[] args) {
> new GroovyShell().evaluate("@Grab('org.apache.spark:spark-core_2.10:1.5.2')\n" +
> "import org.apache.spark.api.java.JavaSparkContext");
> }
> }
>
>
> build.gradle
>
> apply plugin: 'java'
> repositories {
> mavenCentral()
> }
> dependencies {
> compile 'org.codehaus.groovy:groovy-all:2.4.5'
> runtime 'org.apache.ivy:ivy:2.4.0'
> }
>
>
> It also works in the GroovyConsole for me. I know it seems obvious, but
> maybe check your Groovy lib directory for the Ivy jar? I'm having trouble
> thinking what else the difference might be. Unless maybe I'm running a
> line that's different from the one you saw fail?
>
> -Keegan
>
> On Tue, Nov 17, 2015 at 4:48 PM, tog <gu...@gmail.com> wrote:
>
>> Hi Keegan
>>
>> Thanks for testing
>>
>> Well it works with groovysh indeed but not in a script using GroovyShell
>> using the following scripts
>>
>> https://gist.github.com/galleon/231dbfcff36f8d4ce6c2
>> https://gist.github.com/galleon/e0807499a1b8b78924ca
>>
>> Any idea what I do wrong ?
>>
>> --------
>>
>> tog GroovySpark $ groovy -version
>>
>> Groovy Version: 2.4.5 JVM: 1.8.0_60 Vendor: Oracle Corporation OS: Mac OS
>> X
>>
>>
>> tog GroovySpark $ groovysh
>>
>> Groovy Shell (2.4.5, JVM: 1.8.0_60)
>>
>> Type '*:help*' or '*:h*' for help.
>>
>>
>> --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>
>> *groovy:*000*>* groovy.grape.Grape.grab(group:'org.apache.spark',
>> module:'spark-core_2.10', version:'1.5.2')
>>
>> *===>* null
>>
>> *groovy:*000*>* import org.apache.spark.api.java.JavaSparkContext
>>
>> *===>* org.apache.spark.api.java.JavaSparkContext
>>
>> *groovy:*000*>* println JavaSparkContext
>>
>> class org.apache.spark.api.java.JavaSparkContext
>>
>> *===>* null
>>
>> On 17 November 2015 at 21:26, Keegan Witt <ke...@gmail.com> wrote:
>>
>>> What version of Groovy are you using? It seems to work for me with
>>> 2.4.5 and Java 1.8.0_65.
>>>
>>> groovy.grape.Grape.grab(group:'org.apache.spark',
>>> module:'spark-core_2.10', version:'1.5.2')
>>> ===> null
>>> groovy:000> import org.apache.spark.api.java.JavaSparkContext
>>> ===> org.apache.spark.api.java.JavaSparkContext
>>> groovy:000> println JavaSparkContext
>>> ERROR org.apache.spark.SparkException:
>>> A master URL must be set in your configuration
>>> at org.apache.spark.SparkContext.<init> (SparkContext.scala:394)
>>> at org.apache.spark.SparkContext.<init> (SparkContext.scala:112)
>>> at org.apache.spark.api.java.JavaSparkContext.<init>
>>> (JavaSparkContext.scala:56)
>>>
>>>
>>> On Tue, Nov 17, 2015 at 4:24 PM, Thibault Kruse <
>>> tibokruse@googlemail.com> wrote:
>>>
>>>> you'd have to recompile, my PR is growing old
>>>>
>>>> On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com>
>>>> wrote:
>>>> > Thibault
>>>> >
>>>> > Has your change been pushed in groovy recently or should I recompile
>>>> my own
>>>> > version to test if that solve my issue?
>>>> > Any other way to test it without having to generate my own version ?
>>>> >
>>>> > Cheers
>>>> > Guillaume
>>>> >
>>>> > On 17 November 2015 at 20:57, Thibault Kruse <
>>>> tibokruse@googlemail.com>
>>>> > wrote:
>>>> >>
>>>> >> Not sure if this is related at all. But I had an issue getting Grape
>>>> >> imports available in Groovysh (which is related to Groovy Shell),
>>>> >> which caused me to try and tamper with the Grape classloading:
>>>> >>
>>>> >>
>>>> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
>>>> >>
>>>> >> This might be unrelated to your problems, though.
>>>> >>
>>>> >>
>>>> >> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com>
>>>> wrote:
>>>> >> > Hello
>>>> >> >
>>>> >> > Any more ideas regarding my issue?
>>>> >> >
>>>> >> > Thanks
>>>> >> > Guillaume
>>>> >> >
>>>> >> > On 15 November 2015 at 20:19, tog <gu...@gmail.com>
>>>> wrote:
>>>> >> >>
>>>> >> >> Sorry, my previous email is wrong.
>>>> >> >>
>>>> >> >> The block:
>>>> >> >> groovy.grape.Grape.grab(
>>>> >> >> groupId: 'org.apache.spark',
>>>> >> >> artifactId: 'spark-core_2.10',
>>>> >> >> version: '1.5.2'
>>>> >> >> )
>>>> >> >>
>>>> >> >> does not seem equivalent to:
>>>> >> >>
>>>> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>>> >> >>
>>>> >> >> since the imports cannot be found.
>>>> >> >>
>>>> >> >>
>>>> >> >>
>>>> >> >>
>>>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>>> >> >>
>>>> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>>> >> >>
>>>> >> >> org.codehaus.groovy.control.MultipleCompilationErrorsException:
>>>> startup
>>>> >> >> failed:
>>>> >> >>
>>>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9:
>>>> unable to
>>>> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
>>>> >> >>
>>>> >> >> @ line 9, column 1.
>>>> >> >>
>>>> >> >> import org.apache.spark.api.java.JavaSparkContext
>>>> >> >>
>>>> >> >> ^
>>>> >> >>
>>>> >> >>
>>>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8:
>>>> unable to
>>>> >> >> resolve class org.apache.spark.SparkConf
>>>> >> >>
>>>> >> >> @ line 8, column 1.
>>>> >> >>
>>>> >> >> import org.apache.spark.SparkConf
>>>> >> >>
>>>> >> >> ^
>>>> >> >>
>>>> >> >>
>>>> >> >> 2 errors
>>>> >> >>
>>>> >> >>
>>>> >> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com>
>>>> wrote:
>>>> >> >>>
>>>> >> >>> Thanks, Yes, just realize the typo ... I fixed it and get the
>>>> very
>>>> >> >>> same
>>>> >> >>> error.
>>>> >> >>> I am getting lost ;-)
>>>> >> >>>
>>>> >> >>>
>>>> >> >>>
>>>> >> >>> org.apache.spark.SparkConf@2158ddec
>>>> java.lang.ClassNotFoundException:
>>>> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>>> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>>> >> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>>> >> >>> java.lang.Class.forName0(Native Method) at
>>>> >> >>> java.lang.Class.forName(Class.java:348) at
>>>> >> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>>> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>>> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>>> >> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>>> >> >>>
>>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
>>>> >> >>> at
>>>> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>>> >> >>>
>>>> >> >>>
>>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>>> >> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>>> >> >>> Method) at
>>>> >> >>>
>>>> >> >>>
>>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>>> >> >>> at
>>>> >> >>>
>>>> >> >>>
>>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>>> >> >>> at
>>>> java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>>> >> >>>
>>>> >> >>>
>>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>>> >> >>> at
>>>> >> >>>
>>>> >> >>>
>>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>>> >> >>> at
>>>> >> >>>
>>>> >> >>>
>>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>>> >> >>> at
>>>> >> >>>
>>>> >> >>>
>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>>> >> >>> at
>>>> >> >>>
>>>> >> >>>
>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>>> >> >>> at Script6.run(Script6.groovy:16)
>>>> >> >>>
>>>> >> >>>
>>>> >> >>>
>>>> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Bahman@bahmanm.com
>>>> >
>>>> >> >>> wrote:
>>>> >> >>>>
>>>> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
>>>> >> >>>>
>>>> >> >>>> > @Grap seems to have default repo to look into ... with the
>>>> change
>>>> >> >>>> > you
>>>> >> >>>> > are suggesting I got
>>>> >> >>>> > ava.lang.RuntimeException: Error grabbing Grapes --
>>>> [unresolved
>>>> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not
>>>> found] at
>>>> >> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>>> >> >>>> > Method)
>>>> >> >>>> >
>>>> >> >>>> > How do I define them?
>>>> >> >>>>
>>>> >> >>>> It was a typo on my side. `artifactId` should be
>>>> "spark-core_2.10"
>>>> >> >>>> (note
>>>> >> >>>> the `-` character).
>>>> >> >>>>
>>>> >> >>>> --
>>>> >> >>>> Bahman Movaqar
>>>> >> >>>>
>>>> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>> >> >>>>
>>>> >> >>>
>>>> >> >>>
>>>> >> >>>
>>>> >> >>> --
>>>> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>> >> >>
>>>> >> >>
>>>> >> >>
>>>> >> >>
>>>> >> >> --
>>>> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>> >> >
>>>> >> >
>>>> >> >
>>>> >> >
>>>> >> > --
>>>> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>> >
>>>> >
>>>> >
>>>> >
>>>> > --
>>>> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>
>>>
>>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Keegan Witt <ke...@gmail.com>.
Ah, sorry, I misunderstood. I think the issue might be that you don't have
Ivy on your classpath. This executes without exceptions for me
Main.java
import groovy.lang.GroovyShell;
public class Main {
public static void main(String[] args) {
new GroovyShell().evaluate("@Grab('org.apache.spark:spark-core_2.10:1.5.2')\n"
+
"import org.apache.spark.api.java.JavaSparkContext");
}
}
build.gradle
apply plugin: 'java'
repositories {
mavenCentral()
}
dependencies {
compile 'org.codehaus.groovy:groovy-all:2.4.5'
runtime 'org.apache.ivy:ivy:2.4.0'
}
It also works in the GroovyConsole for me. I know it seems obvious, but
maybe check your Groovy lib directory for the Ivy jar? I'm having trouble
thinking what else the difference might be. Unless maybe I'm running a
line that's different from the one you saw fail?
-Keegan
On Tue, Nov 17, 2015 at 4:48 PM, tog <gu...@gmail.com> wrote:
> Hi Keegan
>
> Thanks for testing
>
> Well it works with groovysh indeed but not in a script using GroovyShell
> using the following scripts
>
> https://gist.github.com/galleon/231dbfcff36f8d4ce6c2
> https://gist.github.com/galleon/e0807499a1b8b78924ca
>
> Any idea what I do wrong ?
>
> --------
>
> tog GroovySpark $ groovy -version
>
> Groovy Version: 2.4.5 JVM: 1.8.0_60 Vendor: Oracle Corporation OS: Mac OS X
>
>
> tog GroovySpark $ groovysh
>
> Groovy Shell (2.4.5, JVM: 1.8.0_60)
>
> Type '*:help*' or '*:h*' for help.
>
>
> --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>
> *groovy:*000*>* groovy.grape.Grape.grab(group:'org.apache.spark',
> module:'spark-core_2.10', version:'1.5.2')
>
> *===>* null
>
> *groovy:*000*>* import org.apache.spark.api.java.JavaSparkContext
>
> *===>* org.apache.spark.api.java.JavaSparkContext
>
> *groovy:*000*>* println JavaSparkContext
>
> class org.apache.spark.api.java.JavaSparkContext
>
> *===>* null
>
> On 17 November 2015 at 21:26, Keegan Witt <ke...@gmail.com> wrote:
>
>> What version of Groovy are you using? It seems to work for me with 2.4.5
>> and Java 1.8.0_65.
>>
>> groovy.grape.Grape.grab(group:'org.apache.spark',
>> module:'spark-core_2.10', version:'1.5.2')
>> ===> null
>> groovy:000> import org.apache.spark.api.java.JavaSparkContext
>> ===> org.apache.spark.api.java.JavaSparkContext
>> groovy:000> println JavaSparkContext
>> ERROR org.apache.spark.SparkException:
>> A master URL must be set in your configuration
>> at org.apache.spark.SparkContext.<init> (SparkContext.scala:394)
>> at org.apache.spark.SparkContext.<init> (SparkContext.scala:112)
>> at org.apache.spark.api.java.JavaSparkContext.<init>
>> (JavaSparkContext.scala:56)
>>
>>
>> On Tue, Nov 17, 2015 at 4:24 PM, Thibault Kruse <tibokruse@googlemail.com
>> > wrote:
>>
>>> you'd have to recompile, my PR is growing old
>>>
>>> On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com>
>>> wrote:
>>> > Thibault
>>> >
>>> > Has your change been pushed in groovy recently or should I recompile
>>> my own
>>> > version to test if that solve my issue?
>>> > Any other way to test it without having to generate my own version ?
>>> >
>>> > Cheers
>>> > Guillaume
>>> >
>>> > On 17 November 2015 at 20:57, Thibault Kruse <tibokruse@googlemail.com
>>> >
>>> > wrote:
>>> >>
>>> >> Not sure if this is related at all. But I had an issue getting Grape
>>> >> imports available in Groovysh (which is related to Groovy Shell),
>>> >> which caused me to try and tamper with the Grape classloading:
>>> >>
>>> >>
>>> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
>>> >>
>>> >> This might be unrelated to your problems, though.
>>> >>
>>> >>
>>> >> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com>
>>> wrote:
>>> >> > Hello
>>> >> >
>>> >> > Any more ideas regarding my issue?
>>> >> >
>>> >> > Thanks
>>> >> > Guillaume
>>> >> >
>>> >> > On 15 November 2015 at 20:19, tog <gu...@gmail.com>
>>> wrote:
>>> >> >>
>>> >> >> Sorry, my previous email is wrong.
>>> >> >>
>>> >> >> The block:
>>> >> >> groovy.grape.Grape.grab(
>>> >> >> groupId: 'org.apache.spark',
>>> >> >> artifactId: 'spark-core_2.10',
>>> >> >> version: '1.5.2'
>>> >> >> )
>>> >> >>
>>> >> >> does not seem equivalent to:
>>> >> >>
>>> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>> >> >>
>>> >> >> since the imports cannot be found.
>>> >> >>
>>> >> >>
>>> >> >>
>>> >> >>
>>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>> >> >>
>>> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>> >> >>
>>> >> >> org.codehaus.groovy.control.MultipleCompilationErrorsException:
>>> startup
>>> >> >> failed:
>>> >> >>
>>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable
>>> to
>>> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
>>> >> >>
>>> >> >> @ line 9, column 1.
>>> >> >>
>>> >> >> import org.apache.spark.api.java.JavaSparkContext
>>> >> >>
>>> >> >> ^
>>> >> >>
>>> >> >>
>>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable
>>> to
>>> >> >> resolve class org.apache.spark.SparkConf
>>> >> >>
>>> >> >> @ line 8, column 1.
>>> >> >>
>>> >> >> import org.apache.spark.SparkConf
>>> >> >>
>>> >> >> ^
>>> >> >>
>>> >> >>
>>> >> >> 2 errors
>>> >> >>
>>> >> >>
>>> >> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com>
>>> wrote:
>>> >> >>>
>>> >> >>> Thanks, Yes, just realize the typo ... I fixed it and get the very
>>> >> >>> same
>>> >> >>> error.
>>> >> >>> I am getting lost ;-)
>>> >> >>>
>>> >> >>>
>>> >> >>>
>>> >> >>> org.apache.spark.SparkConf@2158ddec
>>> java.lang.ClassNotFoundException:
>>> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>> >> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>> >> >>> java.lang.Class.forName0(Native Method) at
>>> >> >>> java.lang.Class.forName(Class.java:348) at
>>> >> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>> >> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>> >> >>>
>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
>>> >> >>> at
>>> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>> >> >>>
>>> >> >>>
>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>> >> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>> >> >>> Method) at
>>> >> >>>
>>> >> >>>
>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>> >> >>> at
>>> >> >>>
>>> >> >>>
>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>> >> >>> at
>>> java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>> >> >>>
>>> >> >>>
>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>> >> >>> at
>>> >> >>>
>>> >> >>>
>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>> >> >>> at
>>> >> >>>
>>> >> >>>
>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>> >> >>> at
>>> >> >>>
>>> >> >>>
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>> >> >>> at
>>> >> >>>
>>> >> >>>
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>> >> >>> at Script6.run(Script6.groovy:16)
>>> >> >>>
>>> >> >>>
>>> >> >>>
>>> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>>> >> >>> wrote:
>>> >> >>>>
>>> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
>>> >> >>>>
>>> >> >>>> > @Grap seems to have default repo to look into ... with the
>>> change
>>> >> >>>> > you
>>> >> >>>> > are suggesting I got
>>> >> >>>> > java.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found]
>>> at
>>> >> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>> >> >>>> > Method)
>>> >> >>>> >
>>> >> >>>> > How do I define them?
>>> >> >>>>
>>> >> >>>> It was a typo on my side. `artifactId` should be
>>> "spark-core_2.10"
>>> >> >>>> (note
>>> >> >>>> the `-` character).
>>> >> >>>>
>>> >> >>>> --
>>> >> >>>> Bahman Movaqar
>>> >> >>>>
>>> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
>>> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>> >> >>>>
>>> >> >>>
>>> >> >>>
>>> >> >>>
>>> >> >>> --
>>> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>> >> >>
>>> >> >>
>>> >> >>
>>> >> >>
>>> >> >> --
>>> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>> >> >
>>> >> >
>>> >> >
>>> >> >
>>> >> > --
>>> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>> >
>>> >
>>> >
>>> >
>>> > --
>>> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>
>>
>>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Hi Keegan
Thanks for testing
Well it works with groovysh indeed but not in a script using GroovyShell
using the following scripts
https://gist.github.com/galleon/231dbfcff36f8d4ce6c2
https://gist.github.com/galleon/e0807499a1b8b78924ca
Any idea what I do wrong ?
--------
tog GroovySpark $ groovy -version
Groovy Version: 2.4.5 JVM: 1.8.0_60 Vendor: Oracle Corporation OS: Mac OS X
tog GroovySpark $ groovysh
Groovy Shell (2.4.5, JVM: 1.8.0_60)
Type '*:help*' or '*:h*' for help.
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
*groovy:*000*>* groovy.grape.Grape.grab(group:'org.apache.spark',
module:'spark-core_2.10', version:'1.5.2')
*===>* null
*groovy:*000*>* import org.apache.spark.api.java.JavaSparkContext
*===>* org.apache.spark.api.java.JavaSparkContext
*groovy:*000*>* println JavaSparkContext
class org.apache.spark.api.java.JavaSparkContext
*===>* null
On 17 November 2015 at 21:26, Keegan Witt <ke...@gmail.com> wrote:
> What version of Groovy are you using? It seems to work for me with 2.4.5
> and Java 1.8.0_65.
>
> groovy.grape.Grape.grab(group:'org.apache.spark',
> module:'spark-core_2.10', version:'1.5.2')
> ===> null
> groovy:000> import org.apache.spark.api.java.JavaSparkContext
> ===> org.apache.spark.api.java.JavaSparkContext
> groovy:000> println JavaSparkContext
> ERROR org.apache.spark.SparkException:
> A master URL must be set in your configuration
> at org.apache.spark.SparkContext.<init> (SparkContext.scala:394)
> at org.apache.spark.SparkContext.<init> (SparkContext.scala:112)
> at org.apache.spark.api.java.JavaSparkContext.<init>
> (JavaSparkContext.scala:56)
>
>
> On Tue, Nov 17, 2015 at 4:24 PM, Thibault Kruse <ti...@googlemail.com>
> wrote:
>
>> you'd have to recompile, my PR is growing old
>>
>> On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com> wrote:
>> > Thibault
>> >
>> > Has your change been pushed in groovy recently or should I recompile my
>> own
>> > version to test if that solve my issue?
>> > Any other way to test it without having to generate my own version ?
>> >
>> > Cheers
>> > Guillaume
>> >
>> > On 17 November 2015 at 20:57, Thibault Kruse <ti...@googlemail.com>
>> > wrote:
>> >>
>> >> Not sure if this is related at all. But I had an issue getting Grape
>> >> imports available in Groovysh (which is related to Groovy Shell),
>> >> which caused me to try and tamper with the Grape classloading:
>> >>
>> >>
>> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
>> >>
>> >> This might be unrelated to your problems, though.
>> >>
>> >>
>> >> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com>
>> wrote:
>> >> > Hello
>> >> >
>> >> > Any more ideas regarding my issue?
>> >> >
>> >> > Thanks
>> >> > Guillaume
>> >> >
>> >> > On 15 November 2015 at 20:19, tog <gu...@gmail.com>
>> wrote:
>> >> >>
>> >> >> Sorry, my previous email is wrong.
>> >> >>
>> >> >> The block:
>> >> >> groovy.grape.Grape.grab(
>> >> >> groupId: 'org.apache.spark',
>> >> >> artifactId: 'spark-core_2.10',
>> >> >> version: '1.5.2'
>> >> >> )
>> >> >>
>> >> >> does not seem equivalent to:
>> >> >>
>> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>> >> >>
>> >> >> since the imports cannot be found.
>> >> >>
>> >> >>
>> >> >>
>> >> >>
>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>> >> >>
>> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>> >> >>
>> >> >> org.codehaus.groovy.control.MultipleCompilationErrorsException:
>> startup
>> >> >> failed:
>> >> >>
>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable
>> to
>> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
>> >> >>
>> >> >> @ line 9, column 1.
>> >> >>
>> >> >> import org.apache.spark.api.java.JavaSparkContext
>> >> >>
>> >> >> ^
>> >> >>
>> >> >>
>> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable
>> to
>> >> >> resolve class org.apache.spark.SparkConf
>> >> >>
>> >> >> @ line 8, column 1.
>> >> >>
>> >> >> import org.apache.spark.SparkConf
>> >> >>
>> >> >> ^
>> >> >>
>> >> >>
>> >> >> 2 errors
>> >> >>
>> >> >>
>> >> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com>
>> wrote:
>> >> >>>
>> >> >>> Thanks, Yes, just realize the typo ... I fixed it and get the very
>> >> >>> same
>> >> >>> error.
>> >> >>> I am getting lost ;-)
>> >> >>>
>> >> >>>
>> >> >>>
>> >> >>> org.apache.spark.SparkConf@2158ddec
>> java.lang.ClassNotFoundException:
>> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>> >> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>> >> >>> java.lang.Class.forName0(Native Method) at
>> >> >>> java.lang.Class.forName(Class.java:348) at
>> >> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>> >> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>> >> >>>
>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
>> >> >>> at
>> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>> >> >>>
>> >> >>>
>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>> >> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>> >> >>> Method) at
>> >> >>>
>> >> >>>
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>> >> >>> at
>> >> >>>
>> >> >>>
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>> >> >>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>> at
>> >> >>>
>> >> >>>
>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>> >> >>> at
>> >> >>>
>> >> >>>
>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>> >> >>> at
>> >> >>>
>> >> >>>
>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>> >> >>> at
>> >> >>>
>> >> >>>
>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>> >> >>> at
>> >> >>>
>> >> >>>
>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>> >> >>> at Script6.run(Script6.groovy:16)
>> >> >>>
>> >> >>>
>> >> >>>
>> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>> >> >>> wrote:
>> >> >>>>
>> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
>> >> >>>>
>> >> >>>> > @Grap seems to have default repo to look into ... with the
>> change
>> >> >>>> > you
>> >> >>>> > are suggesting I got
>> >> >>>> > java.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found]
>> at
>> >> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>> >> >>>> > Method)
>> >> >>>> >
>> >> >>>> > How do I define them?
>> >> >>>>
>> >> >>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
>> >> >>>> (note
>> >> >>>> the `-` character).
>> >> >>>>
>> >> >>>> --
>> >> >>>> Bahman Movaqar
>> >> >>>>
>> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
>> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>> >> >>>>
>> >> >>>
>> >> >>>
>> >> >>>
>> >> >>> --
>> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>> >> >>
>> >> >>
>> >> >>
>> >> >>
>> >> >> --
>> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>> >> >
>> >> >
>> >> >
>> >> >
>> >> > --
>> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>> >
>> >
>> >
>> >
>> > --
>> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Keegan Witt <ke...@gmail.com>.
What version of Groovy are you using? It seems to work for me with 2.4.5
and Java 1.8.0_65.
groovy.grape.Grape.grab(group:'org.apache.spark', module:'spark-core_2.10',
version:'1.5.2')
===> null
groovy:000> import org.apache.spark.api.java.JavaSparkContext
===> org.apache.spark.api.java.JavaSparkContext
groovy:000> println JavaSparkContext
ERROR org.apache.spark.SparkException:
A master URL must be set in your configuration
at org.apache.spark.SparkContext.<init> (SparkContext.scala:394)
at org.apache.spark.SparkContext.<init> (SparkContext.scala:112)
at org.apache.spark.api.java.JavaSparkContext.<init>
(JavaSparkContext.scala:56)
On Tue, Nov 17, 2015 at 4:24 PM, Thibault Kruse <ti...@googlemail.com>
wrote:
> you'd have to recompile, my PR is growing old
>
> On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com> wrote:
> > Thibault
> >
> > Has your change been pushed in groovy recently or should I recompile my
> own
> > version to test if that solve my issue?
> > Any other way to test it without having to generate my own version ?
> >
> > Cheers
> > Guillaume
> >
> > On 17 November 2015 at 20:57, Thibault Kruse <ti...@googlemail.com>
> > wrote:
> >>
> >> Not sure if this is related at all. But I had an issue getting Grape
> >> imports available in Groovysh (which is related to Groovy Shell),
> >> which caused me to try and tamper with the Grape classloading:
> >>
> >>
> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
> >>
> >> This might be unrelated to your problems, though.
> >>
> >>
> >> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com>
> wrote:
> >> > Hello
> >> >
> >> > Any more ideas regarding my issue?
> >> >
> >> > Thanks
> >> > Guillaume
> >> >
> >> > On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
> >> >>
> >> >> Sorry, my previous email is wrong.
> >> >>
> >> >> The block:
> >> >> groovy.grape.Grape.grab(
> >> >> groupId: 'org.apache.spark',
> >> >> artifactId: 'spark-core_2.10',
> >> >> version: '1.5.2'
> >> >> )
> >> >>
> >> >> does not seem equivalent to:
> >> >>
> >> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
> >> >>
> >> >> since the imports cannot be found.
> >> >>
> >> >>
> >> >>
> >> >>
> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
> >> >>
> >> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
> >> >>
> >> >> org.codehaus.groovy.control.MultipleCompilationErrorsException:
> startup
> >> >> failed:
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
> >> >> resolve class org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> @ line 9, column 1.
> >> >>
> >> >> import org.apache.spark.api.java.JavaSparkContext
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
> >> >> resolve class org.apache.spark.SparkConf
> >> >>
> >> >> @ line 8, column 1.
> >> >>
> >> >> import org.apache.spark.SparkConf
> >> >>
> >> >> ^
> >> >>
> >> >>
> >> >> 2 errors
> >> >>
> >> >>
> >> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com>
> wrote:
> >> >>>
> >> >>> Thanks, Yes, just realize the typo ... I fixed it and get the very
> >> >>> same
> >> >>> error.
> >> >>> I am getting lost ;-)
> >> >>>
> >> >>>
> >> >>>
> >> >>> org.apache.spark.SparkConf@2158ddec
> java.lang.ClassNotFoundException:
> >> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
> >> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
> >> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
> >> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
> >> >>> java.lang.Class.forName0(Native Method) at
> >> >>> java.lang.Class.forName(Class.java:348) at
> >> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
> >> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
> >> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
> >> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
> >> >>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
> >> >>> at
> >> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
> >> >>>
> >> >>>
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>> Method) at
> >> >>>
> >> >>>
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >> >>> at
> >> >>>
> >> >>>
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >> >>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> at
> >> >>>
> >> >>>
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >> >>> at
> >> >>>
> >> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >> >>> at Script6.run(Script6.groovy:16)
> >> >>>
> >> >>>
> >> >>>
> >> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
> >> >>> wrote:
> >> >>>>
> >> >>>> On 11/15/2015 10:03 PM, tog wrote:
> >> >>>>
> >> >>>> > @Grap seems to have default repo to look into ... with the change
> >> >>>> > you
> >> >>>> > are suggesting I got
> >> >>>> > java.lang.RuntimeException: Error grabbing Grapes -- [unresolved
> >> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
> >> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> >> >>>> > Method)
> >> >>>> >
> >> >>>> > How do I define them?
> >> >>>>
> >> >>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
> >> >>>> (note
> >> >>>> the `-` character).
> >> >>>>
> >> >>>> --
> >> >>>> Bahman Movaqar
> >> >>>>
> >> >>>> http://BahmanM.com - https://twitter.com/bahman__m
> >> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
> >> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
> >> >>>>
> >> >>>
> >> >>>
> >> >>>
> >> >>> --
> >> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >> >>
> >> >>
> >> >>
> >> >>
> >> >> --
> >> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >> >
> >> >
> >> >
> >> >
> >> > --
> >> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >
> >
> >
> >
> > --
> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
Re: GroovyShell
Posted by Thibault Kruse <ti...@googlemail.com>.
you'd have to recompile, my PR is growing old
On Tue, Nov 17, 2015 at 10:08 PM, tog <gu...@gmail.com> wrote:
> Thibault
>
> Has your change been pushed in groovy recently or should I recompile my own
> version to test if that solve my issue?
> Any other way to test it without having to generate my own version ?
>
> Cheers
> Guillaume
>
> On 17 November 2015 at 20:57, Thibault Kruse <ti...@googlemail.com>
> wrote:
>>
>> Not sure if this is related at all. But I had an issue getting Grape
>> imports available in Groovysh (which is related to Groovy Shell),
>> which caused me to try and tamper with the Grape classloading:
>>
>> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
>>
>> This might be unrelated to your problems, though.
>>
>>
>> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com> wrote:
>> > Hello
>> >
>> > Any more ideas regarding my issue?
>> >
>> > Thanks
>> > Guillaume
>> >
>> > On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>> >>
>> >> Sorry, my previous email is wrong.
>> >>
>> >> The block:
>> >> groovy.grape.Grape.grab(
>> >> groupId: 'org.apache.spark',
>> >> artifactId: 'spark-core_2.10',
>> >> version: '1.5.2'
>> >> )
>> >>
>> >> does not seem equivalent to:
>> >>
>> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>> >>
>> >> since the imports cannot be found.
>> >>
>> >>
>> >>
>> >> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>> >>
>> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>> >>
>> >> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
>> >> failed:
>> >>
>> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>> >> resolve class org.apache.spark.api.java.JavaSparkContext
>> >>
>> >> @ line 9, column 1.
>> >>
>> >> import org.apache.spark.api.java.JavaSparkContext
>> >>
>> >> ^
>> >>
>> >>
>> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>> >> resolve class org.apache.spark.SparkConf
>> >>
>> >> @ line 8, column 1.
>> >>
>> >> import org.apache.spark.SparkConf
>> >>
>> >> ^
>> >>
>> >>
>> >> 2 errors
>> >>
>> >>
>> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>> >>>
>> >>> Thanks, Yes, just realize the typo ... I fixed it and get the very
>> >>> same
>> >>> error.
>> >>> I am getting lost ;-)
>> >>>
>> >>>
>> >>>
>> >>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>> >>> java.lang.Class.forName0(Native Method) at
>> >>> java.lang.Class.forName(Class.java:348) at
>> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>> >>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
>> >>> at
>> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>> >>>
>> >>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>> >>> Method) at
>> >>>
>> >>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>> >>> at
>> >>>
>> >>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>> >>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>> >>>
>> >>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>> >>> at
>> >>>
>> >>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>> >>> at
>> >>>
>> >>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>> >>> at
>> >>>
>> >>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>> >>> at
>> >>>
>> >>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>> >>> at Script6.run(Script6.groovy:16)
>> >>>
>> >>>
>> >>>
>> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>> >>> wrote:
>> >>>>
>> >>>> On 11/15/2015 10:03 PM, tog wrote:
>> >>>>
>> >>>> > @Grap seems to have default repo to look into ... with the change
>> >>>> > you
>> >>>> > are suggesting I got
> >> >>>> > java.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>> >>>> > Method)
>> >>>> >
>> >>>> > How do I define them?
>> >>>>
>> >>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
>> >>>> (note
>> >>>> the `-` character).
>> >>>>
>> >>>> --
>> >>>> Bahman Movaqar
>> >>>>
>> >>>> http://BahmanM.com - https://twitter.com/bahman__m
>> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>> >>>>
>> >>>
>> >>>
>> >>>
>> >>> --
>> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>> >>
>> >>
>> >>
>> >>
>> >> --
>> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>> >
>> >
>> >
>> >
>> > --
>> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Thibault
Has your change been pushed in groovy recently or should I recompile my own
version to test if that solve my issue?
Any other way to test it without having to generate my own version ?
Cheers
Guillaume
On 17 November 2015 at 20:57, Thibault Kruse <ti...@googlemail.com>
wrote:
> Not sure if this is related at all. But I had an issue getting Grape
> imports available in Groovysh (which is related to Groovy Shell),
> which caused me to try and tamper with the Grape classloading:
>
> http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
>
> This might be unrelated to your problems, though.
>
>
> On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com> wrote:
> > Hello
> >
> > Any more ideas regarding my issue?
> >
> > Thanks
> > Guillaume
> >
> > On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
> >>
> >> Sorry, my previous email is wrong.
> >>
> >> The block:
> >> groovy.grape.Grape.grab(
> >> groupId: 'org.apache.spark',
> >> artifactId: 'spark-core_2.10',
> >> version: '1.5.2'
> >> )
> >>
> >> does not seem equivalent to:
> >>
> >> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
> >>
> >> since the imports cannot be found.
> >>
> >>
> >>
> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
> >>
> >> tog GroovySpark $ groovy GroovySparkWordcount.groovy
> >>
> >> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
> >> failed:
> >>
> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
> >> resolve class org.apache.spark.api.java.JavaSparkContext
> >>
> >> @ line 9, column 1.
> >>
> >> import org.apache.spark.api.java.JavaSparkContext
> >>
> >> ^
> >>
> >>
> >> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
> >> resolve class org.apache.spark.SparkConf
> >>
> >> @ line 8, column 1.
> >>
> >> import org.apache.spark.SparkConf
> >>
> >> ^
> >>
> >>
> >> 2 errors
> >>
> >>
> >> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
> >>>
> >>> Thanks, Yes, just realize the typo ... I fixed it and get the very same
> >>> error.
> >>> I am getting lost ;-)
> >>>
> >>>
> >>>
> >>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
> >>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
> >>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
> >>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
> >>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
> >>> java.lang.Class.forName0(Native Method) at
> >>> java.lang.Class.forName(Class.java:348) at
> >>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
> >>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
> >>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
> >>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
> >>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
> >>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
> >>>
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method) at
> >>>
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >>> at
> >>>
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
> >>>
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >>> at
> >>>
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >>> at
> >>>
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >>> at
> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >>> at
> >>>
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >>> at Script6.run(Script6.groovy:16)
> >>>
> >>>
> >>>
> >>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
> wrote:
> >>>>
> >>>> On 11/15/2015 10:03 PM, tog wrote:
> >>>>
> >>>> > @Grap seems to have default repo to look into ... with the change
> you
> >>>> > are suggesting I got
> >>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
> >>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
> >>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
> >>>> >
> >>>> > How do I define them?
> >>>>
> >>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
> (note
> >>>> the `-` character).
> >>>>
> >>>> --
> >>>> Bahman Movaqar
> >>>>
> >>>> http://BahmanM.com - https://twitter.com/bahman__m
> >>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
> >>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
> >>>>
> >>>
> >>>
> >>>
> >>> --
> >>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >>
> >>
> >>
> >>
> >> --
> >> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
> >
> >
> >
> >
> > --
> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Thibault Kruse <ti...@googlemail.com>.
Not sure if this is related at all. But I had an issue getting Grape
imports available in Groovysh (which is related to Groovy Shell),
which caused me to try to tamper with the Grape classloading:
http://mail-archives.apache.org/mod_mbox/groovy-dev/201508.mbox/%3CCAByu6UVw1KNVqPnQrjKRCANj6e8od9sGczinz7iDWA1P+=45PA@mail.gmail.com%3E
This might be unrelated to your problems, though.
On Tue, Nov 17, 2015 at 9:24 PM, tog <gu...@gmail.com> wrote:
> Hello
>
> Any more ideas regarding my issue?
>
> Thanks
> Guillaume
>
> On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>>
>> Sorry, my previous email is wrong.
>>
>> The block:
>> groovy.grape.Grape.grab(
>> groupId: 'org.apache.spark',
>> artifactId: 'spark-core_2.10',
>> version: '1.5.2'
>> )
>>
>> does not seem equivalent to:
>>
>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>
>> since the imports cannot be found.
>>
>>
>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>
>> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>
>> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
>> failed:
>>
>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>> resolve class org.apache.spark.api.java.JavaSparkContext
>>
>> @ line 9, column 1.
>>
>> import org.apache.spark.api.java.JavaSparkContext
>>
>> ^
>>
>>
>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>> resolve class org.apache.spark.SparkConf
>>
>> @ line 8, column 1.
>>
>> import org.apache.spark.SparkConf
>>
>> ^
>>
>>
>> 2 errors
>>
>>
>> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>>>
>>> Thanks, Yes, just realize the typo ... I fixed it and get the very same
>>> error.
>>> I am getting lost ;-)
>>>
>>>
>>>
>>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>> java.lang.Class.forName0(Native Method) at
>>> java.lang.Class.forName(Class.java:348) at
>>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>> at
>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>> at
>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>> at
>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>> at
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>> at
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>> at Script6.run(Script6.groovy:16)
>>>
>>>
>>>
>>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com> wrote:
>>>>
>>>> On 11/15/2015 10:03 PM, tog wrote:
>>>>
>>>> > @Grap seems to have default repo to look into ... with the change you
>>>> > are suggesting I got
>>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>>> >
>>>> > How do I define them?
>>>>
>>>> It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
>>>> the `-` character).
>>>>
>>>> --
>>>> Bahman Movaqar
>>>>
>>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>>
>>>
>>>
>>>
>>> --
>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Ok, yes I realized that and changed the names earlier ... but that still
doesn't work using GroovyShell ;-)
Thanks
Guillaume
On 17 November 2015 at 21:16, Keegan Witt <ke...@gmail.com> wrote:
> I was just pointing out that you used "groupId" instead of "group", and
> "artifactId" instead of "module".
>
> On Tue, Nov 17, 2015 at 4:04 PM, tog <gu...@gmail.com> wrote:
>
>> Hi Keegan
>>
>> Not sure to understand. When I run directly my script from the command
>> line:
>>
>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>> ................... ................ ................
>> ............................ works
>>
>> @Grab(group='org.apache.spark', module='spark-core_2.10',
>> version='1.5.2') .......................................works
>>
>> groovy.grape.Grape.grab(group:'org.apache.spark',
>> module:'spark-core_2.10', version:'1.5.2') ............. does not work
>>
>> I was suggested the last one in place of @Gra)
>>
>>
>> When I try to run the same script through GroovyShell (which is what I
>> really want to do) then I got the following exception:
>>
>> java.lang.ClassNotFoundException:
>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory
>>
>> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>>
>> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>>
>> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
>>
>> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
>>
>> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
>>
>> at java.lang.Class.forName0(Native Method)
>>
>> at java.lang.Class.forName(Class.java:348)
>>
>>
>> Cheers
>>
>> Guillaume
>>
>> On 17 November 2015 at 20:47, Keegan Witt <ke...@gmail.com> wrote:
>>
>>> Guillaume,
>>> You just have the wrong syntax. This
>>> @Grab(group='org.apache.spark', module='spark-core_2.10',
>>> version='1.5.2')
>>>
>>> is the equivalent to this
>>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>>
>>> Grab uses the Ivy names for the coordinates rather than Maven's naming
>>> convention. Let me know if that doesn't work or that doesn't answer your
>>> question.
>>>
>>> -Keegan
>>>
>>> On Tue, Nov 17, 2015 at 3:24 PM, tog <gu...@gmail.com> wrote:
>>>
>>>> Hello
>>>>
>>>> Any more ideas regarding my issue?
>>>>
>>>> Thanks
>>>> Guillaume
>>>>
>>>> On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>>>>
>>>>> Sorry, my previous email is wrong.
>>>>>
>>>>> The block:
>>>>> groovy.grape.Grape.grab(
>>>>> groupId: 'org.apache.spark',
>>>>> artifactId: 'spark-core_2.10',
>>>>> version: '1.5.2'
>>>>> )
>>>>>
>>>>> does not seem equivalent to:
>>>>>
>>>>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>>>>
>>>>> since the imports cannot be found.
>>>>>
>>>>>
>>>>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>>>>
>>>>> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>>>>
>>>>> org.codehaus.groovy.control.MultipleCompilationErrorsException:
>>>>> startup failed:
>>>>>
>>>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>>>>> resolve class org.apache.spark.api.java.JavaSparkContext
>>>>>
>>>>> @ line 9, column 1.
>>>>>
>>>>> import org.apache.spark.api.java.JavaSparkContext
>>>>>
>>>>> ^
>>>>>
>>>>>
>>>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>>>>> resolve class org.apache.spark.SparkConf
>>>>>
>>>>> @ line 8, column 1.
>>>>>
>>>>> import org.apache.spark.SparkConf
>>>>>
>>>>> ^
>>>>>
>>>>>
>>>>> 2 errors
>>>>>
>>>>> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>>>>>
>>>>>> Thanks, Yes, just realize the typo ... I fixed it and get the very
>>>>>> same error.
>>>>>> I am getting lost ;-)
>>>>>>
>>>>>>
>>>>>>
>>>>>> org.apache.spark.SparkConf@2158ddec
>>>>>> java.lang.ClassNotFoundException:
>>>>>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>>>>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>>>>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>>>>> java.lang.Class.forName0(Native Method) at
>>>>>> java.lang.Class.forName(Class.java:348) at
>>>>>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>>>>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>>>>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>>>>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>>>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>>>>>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>>>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>>>>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>>>>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>>>>> at
>>>>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>>>>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>>>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>>>>> at
>>>>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>>>>> at
>>>>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>>>>> at
>>>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>>>>> at
>>>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>>>>> at Script6.run(Script6.groovy:16)
>>>>>>
>>>>>>
>>>>>>
>>>>>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>>>>>> wrote:
>>>>>>
>>>>>>> On 11/15/2015 10:03 PM, tog wrote:
>>>>>>>
>>>>>>> > @Grap seems to have default repo to look into ... with the change
>>>>>>> you
>>>>>>> > are suggesting I got
>>>>>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>>>>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>>>>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>>>>>> Method)
>>>>>>> >
>>>>>>> > How do I define them?
>>>>>>>
>>>>>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
>>>>>>> (note
>>>>>>> the `-` character).
>>>>>>>
>>>>>>> --
>>>>>>> Bahman Movaqar
>>>>>>>
>>>>>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>>>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>>>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>>>>>
>>>>>>>
>>>>>>
>>>>>>
>>>>>> --
>>>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>>>
>>>>>
>>>>>
>>>>>
>>>>> --
>>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>>
>>>>
>>>>
>>>>
>>>> --
>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>
>>>
>>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Keegan Witt <ke...@gmail.com>.
I was just pointing out that you used "groupId" instead of "group", and
"artifactId" instead of "module".
On Tue, Nov 17, 2015 at 4:04 PM, tog <gu...@gmail.com> wrote:
> Hi Keegan
>
> Not sure to understand. When I run directly my script from the command
> line:
>
> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
> ................... ................ ................
> ............................ works
>
> @Grab(group='org.apache.spark', module='spark-core_2.10', version='1.5.2')
> .......................................works
>
> groovy.grape.Grape.grab(group:'org.apache.spark',
> module:'spark-core_2.10', version:'1.5.2') ............. does not work
>
> I was suggested the last one in place of @Gra)
>
>
> When I try to run the same script through GroovyShell (which is what I
> really want to do) then I got the following exception:
>
> java.lang.ClassNotFoundException:
> org.apache.spark.rpc.akka.AkkaRpcEnvFactory
>
> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>
> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
>
> at java.lang.Class.forName0(Native Method)
>
> at java.lang.Class.forName(Class.java:348)
>
>
> Cheers
>
> Guillaume
>
> On 17 November 2015 at 20:47, Keegan Witt <ke...@gmail.com> wrote:
>
>> Guillaume,
>> You just have the wrong syntax. This
>> @Grab(group='org.apache.spark', module='spark-core_2.10', version='1.5.2')
>>
>> is the equivalent to this
>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>
>> Grab uses the Ivy names for the coordinates rather than Maven's naming
>> convention. Let me know if that doesn't work or that doesn't answer your
>> question.
>>
>> -Keegan
>>
>> On Tue, Nov 17, 2015 at 3:24 PM, tog <gu...@gmail.com> wrote:
>>
>>> Hello
>>>
>>> Any more ideas regarding my issue?
>>>
>>> Thanks
>>> Guillaume
>>>
>>> On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>>>
>>>> Sorry, my previous email is wrong.
>>>>
>>>> The block:
>>>> groovy.grape.Grape.grab(
>>>> groupId: 'org.apache.spark',
>>>> artifactId: 'spark-core_2.10',
>>>> version: '1.5.2'
>>>> )
>>>>
>>>> does not seem equivalent to:
>>>>
>>>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>>>
>>>> since the imports cannot be found.
>>>>
>>>>
>>>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>>>
>>>> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>>>
>>>> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
>>>> failed:
>>>>
>>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>>>> resolve class org.apache.spark.api.java.JavaSparkContext
>>>>
>>>> @ line 9, column 1.
>>>>
>>>> import org.apache.spark.api.java.JavaSparkContext
>>>>
>>>> ^
>>>>
>>>>
>>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>>>> resolve class org.apache.spark.SparkConf
>>>>
>>>> @ line 8, column 1.
>>>>
>>>> import org.apache.spark.SparkConf
>>>>
>>>> ^
>>>>
>>>>
>>>> 2 errors
>>>>
>>>> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>>>>
>>>>> Thanks, Yes, just realize the typo ... I fixed it and get the very
>>>>> same error.
>>>>> I am getting lost ;-)
>>>>>
>>>>>
>>>>>
>>>>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>>>>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>>>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>>>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>>>> java.lang.Class.forName0(Native Method) at
>>>>> java.lang.Class.forName(Class.java:348) at
>>>>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>>>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>>>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>>>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>>>>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>>>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>>>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>>>> at
>>>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>>>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>>>> at
>>>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>>>> at
>>>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>>>> at
>>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>>>> at
>>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>>>> at Script6.run(Script6.groovy:16)
>>>>>
>>>>>
>>>>>
>>>>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>>>>> wrote:
>>>>>
>>>>>> On 11/15/2015 10:03 PM, tog wrote:
>>>>>>
>>>>>> > @Grap seems to have default repo to look into ... with the change
>>>>>> you
>>>>>> > are suggesting I got
>>>>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>>>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>>>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>>>>>> Method)
>>>>>> >
>>>>>> > How do I define them?
>>>>>>
>>>>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
>>>>>> (note
>>>>>> the `-` character).
>>>>>>
>>>>>> --
>>>>>> Bahman Movaqar
>>>>>>
>>>>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>>>>
>>>>>>
>>>>>
>>>>>
>>>>> --
>>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>>
>>>>
>>>>
>>>>
>>>> --
>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>
>>>
>>>
>>>
>>> --
>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>
>>
>>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Hi Keegan
I'm not sure I understand. When I run my script directly from the command line:
@Grab('org.apache.spark:spark-core_2.10:1.5.2')
................... ................ ................
............................ works
@Grab(group='org.apache.spark', module='spark-core_2.10', version='1.5.2')
.......................................works
groovy.grape.Grape.grab(group:'org.apache.spark', module:'spark-core_2.10',
version:'1.5.2') ............. does not work
The last one was suggested to me in place of @Grab.
When I try to run the same script through GroovyShell (which is what I
really want to do) then I got the following exception:
java.lang.ClassNotFoundException:
org.apache.spark.rpc.akka.AkkaRpcEnvFactory
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
Cheers
Guillaume
On 17 November 2015 at 20:47, Keegan Witt <ke...@gmail.com> wrote:
> Guillaume,
> You just have the wrong syntax. This
> @Grab(group='org.apache.spark', module='spark-core_2.10', version='1.5.2')
>
> is the equivalent to this
> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>
> Grab uses the Ivy names for the coordinates rather than Maven's naming
> convention. Let me know if that doesn't work or that doesn't answer your
> question.
>
> -Keegan
>
> On Tue, Nov 17, 2015 at 3:24 PM, tog <gu...@gmail.com> wrote:
>
>> Hello
>>
>> Any more ideas regarding my issue?
>>
>> Thanks
>> Guillaume
>>
>> On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>>
>>> Sorry, my previous email is wrong.
>>>
>>> The block:
>>> groovy.grape.Grape.grab(
>>> groupId: 'org.apache.spark',
>>> artifactId: 'spark-core_2.10',
>>> version: '1.5.2'
>>> )
>>>
>>> does not seem equivalent to:
>>>
>>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>>
>>> since the imports cannot be found.
>>>
>>>
>>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>>
>>> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>>
>>> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
>>> failed:
>>>
>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>>> resolve class org.apache.spark.api.java.JavaSparkContext
>>>
>>> @ line 9, column 1.
>>>
>>> import org.apache.spark.api.java.JavaSparkContext
>>>
>>> ^
>>>
>>>
>>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>>> resolve class org.apache.spark.SparkConf
>>>
>>> @ line 8, column 1.
>>>
>>> import org.apache.spark.SparkConf
>>>
>>> ^
>>>
>>>
>>> 2 errors
>>>
>>> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>>>
>>>> Thanks, Yes, just realize the typo ... I fixed it and get the very same
>>>> error.
>>>> I am getting lost ;-)
>>>>
>>>>
>>>>
>>>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>>>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>>> java.lang.Class.forName0(Native Method) at
>>>> java.lang.Class.forName(Class.java:348) at
>>>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>>>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>>> at
>>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>>> at
>>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>>> at
>>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>>> at
>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>>> at
>>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>>> at Script6.run(Script6.groovy:16)
>>>>
>>>>
>>>>
>>>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com>
>>>> wrote:
>>>>
>>>>> On 11/15/2015 10:03 PM, tog wrote:
>>>>>
>>>>> > @Grap seems to have default repo to look into ... with the change you
>>>>> > are suggesting I got
>>>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>>>> >
>>>>> > How do I define them?
>>>>>
>>>>> It was a typo on my side. `artifactId` should be "spark-core_2.10"
>>>>> (note
>>>>> the `-` character).
>>>>>
>>>>> --
>>>>> Bahman Movaqar
>>>>>
>>>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>>>
>>>>>
>>>>
>>>>
>>>> --
>>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>>
>>>
>>>
>>>
>>> --
>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>
>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Keegan Witt <ke...@gmail.com>.
Guillaume,
You just have the wrong syntax. This
@Grab(group='org.apache.spark', module='spark-core_2.10', version='1.5.2')
is the equivalent to this
@Grab('org.apache.spark:spark-core_2.10:1.5.2')
Grab uses the Ivy names for the coordinates rather than Maven's naming
convention. Let me know if that doesn't work or that doesn't answer your
question.
-Keegan
On Tue, Nov 17, 2015 at 3:24 PM, tog <gu...@gmail.com> wrote:
> Hello
>
> Any more ideas regarding my issue?
>
> Thanks
> Guillaume
>
> On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
>
>> Sorry, my previous email is wrong.
>>
>> The block:
>> groovy.grape.Grape.grab(
>> groupId: 'org.apache.spark',
>> artifactId: 'spark-core_2.10',
>> version: '1.5.2'
>> )
>>
>> does not seem equivalent to:
>>
>> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>>
>> since the imports cannot be found.
>>
>>
>> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>>
>> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>>
>> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
>> failed:
>>
>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
>> resolve class org.apache.spark.api.java.JavaSparkContext
>>
>> @ line 9, column 1.
>>
>> import org.apache.spark.api.java.JavaSparkContext
>>
>> ^
>>
>>
>> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
>> resolve class org.apache.spark.SparkConf
>>
>> @ line 8, column 1.
>>
>> import org.apache.spark.SparkConf
>>
>> ^
>>
>>
>> 2 errors
>>
>> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>>
>>> Thanks, Yes, just realize the typo ... I fixed it and get the very same
>>> error.
>>> I am getting lost ;-)
>>>
>>>
>>>
>>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>>> java.lang.Class.forName0(Native Method) at
>>> java.lang.Class.forName(Class.java:348) at
>>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>> at
>>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>>> at
>>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>>> at
>>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>>> at
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>>> at
>>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>>> at Script6.run(Script6.groovy:16)
>>>
>>>
>>>
>>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com> wrote:
>>>
>>>> On 11/15/2015 10:03 PM, tog wrote:
>>>>
>>>> > @Grap seems to have default repo to look into ... with the change you
>>>> > are suggesting I got
>>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>>> >
>>>> > How do I define them?
>>>>
>>>> It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
>>>> the `-` character).
>>>>
>>>> --
>>>> Bahman Movaqar
>>>>
>>>> http://BahmanM.com - https://twitter.com/bahman__m
>>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>>
>>>>
>>>
>>>
>>> --
>>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>>
>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Hello
Any more ideas regarding my issue?
Thanks
Guillaume
On 15 November 2015 at 20:19, tog <gu...@gmail.com> wrote:
> Sorry, my previous email is wrong.
>
> The block:
> groovy.grape.Grape.grab(
> groupId: 'org.apache.spark',
> artifactId: 'spark-core_2.10',
> version: '1.5.2'
> )
>
> does not seem equivalent to:
>
> @Grab('org.apache.spark:spark-core_2.10:1.5.2')
>
> since the imports cannot be found.
>
>
> ------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>
> tog GroovySpark $ groovy GroovySparkWordcount.groovy
>
> org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
> failed:
>
> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
> resolve class org.apache.spark.api.java.JavaSparkContext
>
> @ line 9, column 1.
>
> import org.apache.spark.api.java.JavaSparkContext
>
> ^
>
>
> /Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
> resolve class org.apache.spark.SparkConf
>
> @ line 8, column 1.
>
> import org.apache.spark.SparkConf
>
> ^
>
>
> 2 errors
>
> On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
>
>> Thanks, Yes, just realize the typo ... I fixed it and get the very same
>> error.
>> I am getting lost ;-)
>>
>>
>>
>> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
>> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
>> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
>> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
>> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
>> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
>> java.lang.Class.forName0(Native Method) at
>> java.lang.Class.forName(Class.java:348) at
>> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
>> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
>> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
>> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
>> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
>> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
>> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>> at
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
>> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>> at
>> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>> at
>> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>> at
>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>> at
>> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>> at Script6.run(Script6.groovy:16)
>>
>>
>>
>> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com> wrote:
>>
>>> On 11/15/2015 10:03 PM, tog wrote:
>>>
>>> > @Grap seems to have default repo to look into ... with the change you
>>> > are suggesting I got
>>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>> >
>>> > How do I define them?
>>>
>>> It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
>>> the `-` character).
>>>
>>> --
>>> Bahman Movaqar
>>>
>>> http://BahmanM.com - https://twitter.com/bahman__m
>>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>>
>>>
>>
>>
>> --
>> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>>
>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Sorry, my previous email is wrong.
The block:
groovy.grape.Grape.grab(
groupId: 'org.apache.spark',
artifactId: 'spark-core_2.10',
version: '1.5.2'
)
does not seem equivalent to:
@Grab('org.apache.spark:spark-core_2.10:1.5.2')
since the imports cannot be found.
------------------------------------------------------------------------------------------------------------------------------------------------------------------------
tog GroovySpark $ groovy GroovySparkWordcount.groovy
org.codehaus.groovy.control.MultipleCompilationErrorsException: startup
failed:
/Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 9: unable to
resolve class org.apache.spark.api.java.JavaSparkContext
@ line 9, column 1.
import org.apache.spark.api.java.JavaSparkContext
^
/Users/tog/Work/GroovySpark/GroovySparkWordcount.groovy: 8: unable to
resolve class org.apache.spark.SparkConf
@ line 8, column 1.
import org.apache.spark.SparkConf
^
2 errors
On 15 November 2015 at 18:55, tog <gu...@gmail.com> wrote:
> Thanks, Yes, just realize the typo ... I fixed it and get the very same
> error.
> I am getting lost ;-)
>
>
>
> org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
> org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
> java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
> java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
> sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
> java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
> java.lang.Class.forName0(Native Method) at
> java.lang.Class.forName(Class.java:348) at
> org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
> org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
> org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
> org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
> org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
> org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> at
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> at
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> at Script6.run(Script6.groovy:16)
>
>
>
> On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com> wrote:
>
>> On 11/15/2015 10:03 PM, tog wrote:
>>
>> > @Grap seems to have default repo to look into ... with the change you
>> > are suggesting I got
>> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
>> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
>> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>> >
>> > How do I define them?
>>
>> It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
>> the `-` character).
>>
>> --
>> Bahman Movaqar
>>
>> http://BahmanM.com - https://twitter.com/bahman__m
>> https://github.com/bahmanm - https://gist.github.com/bahmanm
>> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>>
>>
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Thanks — yes, I just realized the typo ... I fixed it and got the very same
error.
I am getting lost ;-)
org.apache.spark.SparkConf@2158ddec java.lang.ClassNotFoundException:
org.apache.spark.rpc.akka.AkkaRpcEnvFactory at
java.net.URLClassLoader.findClass(URLClassLoader.java:381) at
java.lang.ClassLoader.loadClass(ClassLoader.java:424) at
sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) at
java.lang.ClassLoader.loadClass(ClassLoader.java:357) at
java.lang.Class.forName0(Native Method) at
java.lang.Class.forName(Class.java:348) at
org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:40) at
org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:52) at
org.apache.spark.SparkEnv$.create(SparkEnv.scala:247) at
org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:188) at
org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267) at
org.apache.spark.SparkContext.<init>(SparkContext.scala:424) at
org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422) at
org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
at
org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
at
org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
at
org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
at Script6.run(Script6.groovy:16)
On 15 November 2015 at 18:41, Bahman Movaqar <Ba...@bahmanm.com> wrote:
> On 11/15/2015 10:03 PM, tog wrote:
>
> > @Grap seems to have default repo to look into ... with the change you
> > are suggesting I got
> > ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
> > dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
> > sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> >
> > How do I define them?
>
> It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
> the `-` character).
>
> --
> Bahman Movaqar
>
> http://BahmanM.com - https://twitter.com/bahman__m
> https://github.com/bahmanm - https://gist.github.com/bahmanm
> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Bahman Movaqar <Ba...@BahmanM.com>.
On 11/15/2015 10:03 PM, tog wrote:
> @Grap seems to have default repo to look into ... with the change you
> are suggesting I got
> ava.lang.RuntimeException: Error grabbing Grapes -- [unresolved
> dependency: org.apache.spark#spark core_2.10;1.5.2: not found] at
> sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>
> How do I define them?
It was a typo on my side. `artifactId` should be "spark-core_2.10" (note
the `-` character).
--
Bahman Movaqar
http://BahmanM.com - https://twitter.com/bahman__m
https://github.com/bahmanm - https://gist.github.com/bahmanm
PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
Re: GroovyShell
Posted by tog <gu...@gmail.com>.
Hi
@Grab seems to have a default repo to look into ... with the change you are
suggesting I got
java.lang.RuntimeException: Error grabbing Grapes -- [unresolved dependency:
org.apache.spark#spark core_2.10;1.5.2: not found] at
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
How do I define them?
Cheers
Guillaume
On 15 November 2015 at 16:57, Bahman Movaqar <Ba...@bahmanm.com> wrote:
> I never managed to get `@Grab` working in shell. To go around it, I'd
> change the first line to:
>
> groovy.grape.Grape.grab(
> groupId: 'org.apache.spark',
> artifactId: 'spark core_2.10',
> version: '1.5.2'
> )
>
> --
> Bahman Movaqar
>
> http://BahmanM.com - https://twitter.com/bahman__m
> https://github.com/bahmanm - https://gist.github.com/bahmanm
> PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
>
> On 11/15/2015 08:22 PM, tog wrote:
> > Hi
> >
> > I have a behavior I dont understand using GroovyShell.
> > Here is a script <https://gist.github.com/galleon/231dbfcff36f8d4ce6c2>
> > that is working fine when I use it from the command line
> >
> > When I use it from a second script
> > <https://gist.github.com/galleon/e0807499a1b8b78924ca> using GroovyShell
> > I got the following exception:
> >
> > I got the following exception. Any idea why?
> >
> > togGroovySpark $ groovy GroovySparkThroughGroovyShell.groovy
> >
> > java.lang.ClassNotFoundException:
> > org.apache.spark.rpc.akka.AkkaRpcEnvFactory
> >
> > at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
> >
> > at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
> >
> > at java.lang.Class.forName0(Native Method)
> >
> > at java.lang.Class.forName(Class.java:348)
> >
> > at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
> >
> > at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
> >
> > at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
> >
> > at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
> >
> > at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
> >
> > at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
> >
> > at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
> >
> > at
> >
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >
> > at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> >
> > at
> >
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >
> > at
> >
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >
> > at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> >
> > at
> >
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >
> > at Script1.run(Script1.groovy:9)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
> >
> > at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
> >
> > at
> >
> GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
> >
> > at
> >
> groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
> >
> > at groovy.lang.GroovyShell.run(GroovyShell.java:524)
> >
> > at groovy.lang.GroovyShell.run(GroovyShell.java:513)
> >
> > at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
> >
> > at groovy.ui.GroovyMain.run(GroovyMain.java:384)
> >
> > at groovy.ui.GroovyMain.process(GroovyMain.java:370)
> >
> > at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
> >
> > at groovy.ui.GroovyMain.main(GroovyMain.java:109)
> >
> > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >
> > at
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> >
> > at
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> >
> > at java.lang.reflect.Method.invoke(Method.java:497)
> >
> > at
> >
> org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
> >
> > at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
> >
> >
> > org.apache.spark.SparkConf@7428de63
> >
> >
> > java.lang.ClassNotFoundException:
> > org.apache.spark.rpc.akka.AkkaRpcEnvFactory
> >
> > at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
> >
> > at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
> >
> > at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
> >
> > at java.lang.Class.forName0(Native Method)
> >
> > at java.lang.Class.forName(Class.java:348)
> >
> > at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
> >
> > at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
> >
> > at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
> >
> > at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
> >
> > at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
> >
> > at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
> >
> > at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
> >
> > at
> >
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> >
> > at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> >
> > at
> >
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> >
> > at
> >
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> >
> > at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> >
> > at
> >
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
> >
> > at Script1.run(Script1.groovy:9)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
> >
> > at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
> >
> > at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
> >
> > at
> >
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
> >
> > at
> >
> GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
> >
> > at
> >
> groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
> >
> > at groovy.lang.GroovyShell.run(GroovyShell.java:524)
> >
> > at groovy.lang.GroovyShell.run(GroovyShell.java:513)
> >
> > at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
> >
> > at groovy.ui.GroovyMain.run(GroovyMain.java:384)
> >
> > at groovy.ui.GroovyMain.process(GroovyMain.java:370)
> >
> > at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
> >
> > at groovy.ui.GroovyMain.main(GroovyMain.java:109)
> >
> > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >
> > at
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> >
> > at
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> >
> > at java.lang.reflect.Method.invoke(Method.java:497)
> >
> > at
> >
> org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
> >
> > at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
> >
> >
> > --
> > PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net <http://subkeys.pgp.net>
>
>
--
PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net
Re: GroovyShell
Posted by Bahman Movaqar <Ba...@BahmanM.com>.
I never managed to get `@Grab` working in shell. To go around it, I'd
change the first line to:
groovy.grape.Grape.grab(
groupId: 'org.apache.spark',
artifactId: 'spark core_2.10',
version: '1.5.2'
)
--
Bahman Movaqar
http://BahmanM.com - https://twitter.com/bahman__m
https://github.com/bahmanm - https://gist.github.com/bahmanm
PGP Key ID: 0x6AB5BD68 (keyserver2.pgp.com)
On 11/15/2015 08:22 PM, tog wrote:
> Hi
>
> I have a behavior I dont understand using GroovyShell.
> Here is a script <https://gist.github.com/galleon/231dbfcff36f8d4ce6c2>
> that is working fine when I use it from the command line
>
> When I use it from a second script
> <https://gist.github.com/galleon/e0807499a1b8b78924ca> using GroovyShell
> I got the following exception:
>
> I got the following exception. Any idea why?
>
> togGroovySpark $ groovy GroovySparkThroughGroovyShell.groovy
>
> java.lang.ClassNotFoundException:
> org.apache.spark.rpc.akka.AkkaRpcEnvFactory
>
> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>
> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
>
> at java.lang.Class.forName0(Native Method)
>
> at java.lang.Class.forName(Class.java:348)
>
> at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
>
> at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
>
> at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
>
> at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
>
> at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
>
> at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
>
> at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
>
> at
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>
> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>
> at
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>
> at
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>
> at
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>
> at Script1.run(Script1.groovy:9)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
>
> at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
>
> at
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
>
> at
> GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
>
> at
> groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
>
> at groovy.lang.GroovyShell.run(GroovyShell.java:524)
>
> at groovy.lang.GroovyShell.run(GroovyShell.java:513)
>
> at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
>
> at groovy.ui.GroovyMain.run(GroovyMain.java:384)
>
> at groovy.ui.GroovyMain.process(GroovyMain.java:370)
>
> at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
>
> at groovy.ui.GroovyMain.main(GroovyMain.java:109)
>
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>
> at java.lang.reflect.Method.invoke(Method.java:497)
>
> at
> org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
>
> at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
>
>
> org.apache.spark.SparkConf@7428de63
>
>
> java.lang.ClassNotFoundException:
> org.apache.spark.rpc.akka.AkkaRpcEnvFactory
>
> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>
> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
>
> at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
>
> at java.lang.Class.forName0(Native Method)
>
> at java.lang.Class.forName(Class.java:348)
>
> at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
>
> at org.apache.spark.rpc.RpcEnv$.getRpcEnvFactory(RpcEnv.scala:42)
>
> at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
>
> at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
>
> at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
>
> at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
>
> at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
>
> at
> org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
>
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>
> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>
> at
> org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:80)
>
> at
> org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:105)
>
> at
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallConstructor(CallSiteArray.java:60)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:235)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:247)
>
> at Script1.run(Script1.groovy:9)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:591)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:629)
>
> at groovy.lang.GroovyShell.evaluate(GroovyShell.java:600)
>
> at groovy.lang.GroovyShell$evaluate.call(Unknown Source)
>
> at
> org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
>
> at
> org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125)
>
> at
> GroovySparkThroughGroovyShell.run(GroovySparkThroughGroovyShell.groovy:47)
>
> at
> groovy.lang.GroovyShell.runScriptOrMainOrTestOrRunnable(GroovyShell.java:263)
>
> at groovy.lang.GroovyShell.run(GroovyShell.java:524)
>
> at groovy.lang.GroovyShell.run(GroovyShell.java:513)
>
> at groovy.ui.GroovyMain.processOnce(GroovyMain.java:652)
>
> at groovy.ui.GroovyMain.run(GroovyMain.java:384)
>
> at groovy.ui.GroovyMain.process(GroovyMain.java:370)
>
> at groovy.ui.GroovyMain.processArgs(GroovyMain.java:129)
>
> at groovy.ui.GroovyMain.main(GroovyMain.java:109)
>
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>
> at java.lang.reflect.Method.invoke(Method.java:497)
>
> at
> org.codehaus.groovy.tools.GroovyStarter.rootLoader(GroovyStarter.java:109)
>
> at org.codehaus.groovy.tools.GroovyStarter.main(GroovyStarter.java:131)
>
>
> --
> PGP KeyID: 2048R/EA31CFC9 subkeys.pgp.net <http://subkeys.pgp.net>