You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@twill.apache.org by Peter Grman <pe...@gmail.com> on 2014/06/09 21:32:23 UTC

Using janino with Apache Twill

Hi, I'm currently in the process of porting an Apache project for Big-Data
analysis (Apache Drill) to YARN. I'm using Twill for that purpose and it
almost works.

The last problem, I can't seem to figure out, is an exception from janino.
Apache Drill uses the compiler to create classes on the fly and that works
fine when I run Drill directly. But when run in Apache Twill I get the
exception from Janino that a class couldn't be loaded. I've added this
specific class as a dependency and also could find it in the container
jar-file which was created by Twill.

And the class really is there, because the class which can't be found, is
the same as the exception which is thrown. - Yes that same class to which
the original CompileException is transformed in the end, is the same as the
class which is imported first and can't be found. I suspect that there
is a problem with how twill loads the jars and that's the reason why janino
can't find the class - since it works running it directly from command line
- but I don't know where to look anymore.

Any ideas?
Thanks for your help

The stacktrace from the exception is here:
https://gist.github.com/pgrm/d29d33f356601b1a1a3f
The class definition which can't be compiled alone is here:
https://gist.github.com/pgrm/a589edb64317f5cb1207
And my Twill-Application definition can be found here:
https://github.com/pgrm/incubator-drill/blob/YARN-support/yarn-integration/src/main/java/org/apache/drill/yarn/integration/RunDrill.java

And here, once more, is the exception trace:

2014-06-09T18:30:40,093Z ERROR o.a.d.e.p.i.p.ProjectRecordBatch [zk1]
[37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
AbstractSingleRecordBatch:next(AbstractSingleRecordBatch.java:60) -
Failure during query
org.apache.drill.exec.exception.SchemaChangeException: Failure while
attempting to load generated class

	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:243)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)

	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)

	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)

	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)
Caused by: org.apache.drill.exec.exception.ClassTransformationException:
Failure generating transformation classes for value:
 package org.apache.drill.exec.test.generated;


import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.holders.BitHolder;
import org.apache.drill.exec.expr.holders.VarCharHolder;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.vector.RepeatedVarCharVector;
import org.apache.drill.exec.vector.VarCharVector;
import org.apache.drill.exec.vector.complex.impl.RepeatedVarCharReaderImpl;

public class ProjectorGen0 {


    RepeatedVarCharVector vv0;
    RepeatedVarCharReaderImpl reader4;
    VarCharVector vv5;


    public boolean doEval(int inIndex, int outIndex)
        throws SchemaChangeException

    {
        {
            VarCharHolder out3 = new VarCharHolder();

            complex:
            vv0 .getAccessor().getReader().setPosition((inIndex));

            reader4 .read(0, out3);
            BitHolder out8 = new BitHolder();

            out8 .value = 1;
            if (!vv5 .getMutator().setSafe((outIndex), out3)) {

                out8 .value = 0;
            }
            if (out8 .value == 0) {
                return false;
            }

        }
        {
            return true;

        }
    }


    public void doSetup(FragmentContext context, RecordBatch incoming,
RecordBatch outgoing)
        throws SchemaChangeException

    {
        {
            int[] fieldIds1 = new int[ 1 ] ;

            fieldIds1 [ 0 ] = 0;
            Object tmp2 =
(incoming).getValueAccessorById(RepeatedVarCharVector.class,
fieldIds1).getValueVector();
            if (tmp2 == null) {
                throw new SchemaChangeException("Failure while loading
vector vv0 with id:
org.apache.drill.exec.record.TypedFieldId@1cf4a5a0.");
            }
            vv0 = ((RepeatedVarCharVector) tmp2);

            reader4 = ((RepeatedVarCharReaderImpl) vv0
.getAccessor().getReader());
            int[] fieldIds6 = new int[ 1 ] ;

            fieldIds6 [ 0 ] = 0;
            Object tmp7 =
(outgoing).getValueAccessorById(VarCharVector.class,
fieldIds6).getValueVector();

            if (tmp7 == null) {
                throw new SchemaChangeException("Failure while loading
vector vv5 with id:
org.apache.drill.exec.record.TypedFieldId@1ce776c0.");
            }
            vv5 = ((VarCharVector) tmp7);

        }
    }


}

	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:302)
	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)

	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)

	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)

	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)

	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)
Caused by: org.codehaus.commons.compiler.CompileException: Line 4,
Column 8: Imported class
"org.apache.drill.exec.exception.SchemaChangeException" could not be
loaded
	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
	at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
	at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
	at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
	at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)

	at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
	at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
	at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)

	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)

	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)

	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)

	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)

	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)

2014-06-09T18:30:40,109Z ERROR o.a.d.e.p.i.ScreenCreator$ScreenRoot
[zk1] [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
ErrorHelper:logAndConvertError(ErrorHelper.java:60) - Error
5625373a-729d-4f3f-a262-6f46f2f879d9: Screen received stop request
sent.
org.codehaus.commons.compiler.CompileException: Line 4, Column 8:
Imported class "org.apache.drill.exec.exception.SchemaChangeException"
could not be loaded
	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
	at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
	at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
	at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
	at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)

	at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
	at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
	at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)

	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)

	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)

	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)

	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)

	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)

Re: Using janino with Apache Twill

Posted by Peter Grman <pe...@gmail.com>.
I've also tried now to explicitly add the jar file as a resource:

.withResources(SchemaChangeException.class.getProtectionDomain().getCodeSource().getLocation().toURI())

Since according to the Twill Documentation, this might solve my problem "If
the URI is a jar file, classes inside would be loadable by the ClassLoader."

However instead I just got an exception, because the jar file already is
inside, so how can I find it?

The exception is:

Exception in thread "ServiceDelegate STARTING" java.lang.RuntimeException:
java.util.zip.ZipException: duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
 at com.google.common.base.Throwables.propagate(Throwables.java:160)
at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:133)
 at
org.apache.twill.internal.AbstractZKServiceController.startUp(AbstractZKServiceController.java:82)
at
org.apache.twill.internal.AbstractExecutionServiceController$ServiceDelegate.startUp(AbstractExecutionServiceController.java:109)
 at
com.google.common.util.concurrent.AbstractIdleService$1$1.run(AbstractIdleService.java:43)
at java.lang.Thread.run(Thread.java:744)
Caused by: java.util.zip.ZipException: duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
at java.util.zip.ZipOutputStream.putNextEntry(ZipOutputStream.java:215)
 at java.util.jar.JarOutputStream.putNextEntry(JarOutputStream.java:109)
at
org.apache.twill.internal.ApplicationBundler.copyResource(ApplicationBundler.java:347)
 at
org.apache.twill.internal.ApplicationBundler.createBundle(ApplicationBundler.java:140)
at
org.apache.twill.yarn.YarnTwillPreparer.createContainerJar(YarnTwillPreparer.java:388)
 at
org.apache.twill.yarn.YarnTwillPreparer.access$300(YarnTwillPreparer.java:106)
at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:264)
 at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:253)
at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:98)
 ... 4 more
java.util.concurrent.ExecutionException: java.lang.RuntimeException:
java.util.zip.ZipException: duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
 at
com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
at
com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
 at
com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
at org.apache.drill.yarn.integration.RunDrill.main(RunDrill.java:84)
Caused by: java.lang.RuntimeException: java.util.zip.ZipException:
duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
at com.google.common.base.Throwables.propagate(Throwables.java:160)
 at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:133)
at
org.apache.twill.internal.AbstractZKServiceController.startUp(AbstractZKServiceController.java:82)
 at
org.apache.twill.internal.AbstractExecutionServiceController$ServiceDelegate.startUp(AbstractExecutionServiceController.java:109)
at
com.google.common.util.concurrent.AbstractIdleService$1$1.run(AbstractIdleService.java:43)
 at java.lang.Thread.run(Thread.java:744)
Caused by: java.util.zip.ZipException: duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
 at java.util.zip.ZipOutputStream.putNextEntry(ZipOutputStream.java:215)
at java.util.jar.JarOutputStream.putNextEntry(JarOutputStream.java:109)
 at
org.apache.twill.internal.ApplicationBundler.copyResource(ApplicationBundler.java:347)
at
org.apache.twill.internal.ApplicationBundler.createBundle(ApplicationBundler.java:140)
 at
org.apache.twill.yarn.YarnTwillPreparer.createContainerJar(YarnTwillPreparer.java:388)
at
org.apache.twill.yarn.YarnTwillPreparer.access$300(YarnTwillPreparer.java:106)
 at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:264)
at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:253)
 at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:98)
... 4 more
Exception in thread "Thread-4"
com.google.common.util.concurrent.UncheckedExecutionException:
java.lang.Exception: Service failed to start.
 at
com.google.common.util.concurrent.Futures.wrapAndThrowUnchecked(Futures.java:1358)
at com.google.common.util.concurrent.Futures.getUnchecked(Futures.java:1344)
 at
org.apache.twill.internal.AbstractExecutionServiceController.stopAndWait(AbstractExecutionServiceController.java:86)
at org.apache.drill.yarn.integration.RunDrill$1.run(RunDrill.java:78)
Caused by: java.lang.Exception: Service failed to start.
at
com.google.common.util.concurrent.AbstractService$1.failed(AbstractService.java:121)
at
com.google.common.util.concurrent.AbstractService$6$1.run(AbstractService.java:456)
 at
com.google.common.util.concurrent.MoreExecutors$SameThreadExecutorService.execute(MoreExecutors.java:293)
at
com.google.common.util.concurrent.AbstractService$ListenerExecutorPair.execute(AbstractService.java:482)
 at
com.google.common.util.concurrent.AbstractService$6.run(AbstractService.java:454)
at
com.google.common.util.concurrent.AbstractService.executeListeners(AbstractService.java:381)
 at
com.google.common.util.concurrent.AbstractService.notifyFailed(AbstractService.java:314)
at
com.google.common.util.concurrent.AbstractIdleService$1$1.run(AbstractIdleService.java:46)
 at java.lang.Thread.run(Thread.java:744)
Caused by: java.lang.RuntimeException: java.util.zip.ZipException:
duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
 at com.google.common.base.Throwables.propagate(Throwables.java:160)
at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:133)
 at
org.apache.twill.internal.AbstractZKServiceController.startUp(AbstractZKServiceController.java:82)
at
org.apache.twill.internal.AbstractExecutionServiceController$ServiceDelegate.startUp(AbstractExecutionServiceController.java:109)
 at
com.google.common.util.concurrent.AbstractIdleService$1$1.run(AbstractIdleService.java:43)
... 1 more
Caused by: java.util.zip.ZipException: duplicate entry:
lib/drill-java-exec-1.0.0-m2-incubating-SNAPSHOT-rebuffed.jar
at java.util.zip.ZipOutputStream.putNextEntry(ZipOutputStream.java:215)
 at java.util.jar.JarOutputStream.putNextEntry(JarOutputStream.java:109)
at
org.apache.twill.internal.ApplicationBundler.copyResource(ApplicationBundler.java:347)
 at
org.apache.twill.internal.ApplicationBundler.createBundle(ApplicationBundler.java:140)
at
org.apache.twill.yarn.YarnTwillPreparer.createContainerJar(YarnTwillPreparer.java:388)
 at
org.apache.twill.yarn.YarnTwillPreparer.access$300(YarnTwillPreparer.java:106)
at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:264)
 at
org.apache.twill.yarn.YarnTwillPreparer$1.call(YarnTwillPreparer.java:253)
at
org.apache.twill.yarn.YarnTwillController.doStartUp(YarnTwillController.java:98)
 ... 4 more



On Mon, Jun 9, 2014 at 9:32 PM, Peter Grman <pe...@gmail.com> wrote:

> Hi, I'm currently in the process of porting an Apache project for Big-Data
> analysis (Apache Drill) to YARN. I'm using Twill for that manner and it
> almost works.
>
> The last problem, I can't seem to figure out, is an exception from janino.
> Apache Drill uses the compiler to create classes on the fly and that works
> fine when I run Drill directly. But when run in Apache Twill I get the
> exception from Janino that a class couldn't be loaded. I've added this
> specific class as a dependency and also could find it in the container
> jar-file which was created by Twill.
>
> And the class really is there, because the class which can't be found, is
> the same as the exception which is thrown. - Yes that same class to which
> the original CompileException is transformed in the end, is the same as the
> class which is imported as first and can't be found. I suspect that there
> is a problem with how twill loads the jars and that's the reason why janino
> can't find the class - since it works running it directly from command line
> - but I don't know where to look anymore.
>
> Any ideas?
> Thanks for your help
>
> The stacktrace from the exception is here:
> https://gist.github.com/pgrm/d29d33f356601b1a1a3f
> The class definition which can't be compiled alone is here:
> https://gist.github.com/pgrm/a589edb64317f5cb1207
> And my Twill-Application definition can be found here:
> https://github.com/pgrm/incubator-drill/blob/YARN-support/yarn-integration/src/main/java/org/apache/drill/yarn/integration/RunDrill.java
>
> And here, once more, is the exception trace:
>
> 2014-06-09T18:30:40,093Z ERROR o.a.d.e.p.i.p.ProjectRecordBatch [zk1] [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0] AbstractSingleRecordBatch:next(AbstractSingleRecordBatch.java:60) - Failure during query
> org.apache.drill.exec.exception.SchemaChangeException: Failure while attempting to load generated class
>
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:243)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> 	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
> 	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> 	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> 	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> 	at java.lang.Thread.run(Thread.java:744)
> Caused by: org.apache.drill.exec.exception.ClassTransformationException: Failure generating transformation classes for value:
>  package org.apache.drill.exec.test.generated;
>
>
> import org.apache.drill.exec.exception.SchemaChangeException;
> import org.apache.drill.exec.expr.holders.BitHolder;
> import org.apache.drill.exec.expr.holders.VarCharHolder;
> import org.apache.drill.exec.ops.FragmentContext;
> import org.apache.drill.exec.record.RecordBatch;
> import org.apache.drill.exec.vector.RepeatedVarCharVector;
> import org.apache.drill.exec.vector.VarCharVector;
> import org.apache.drill.exec.vector.complex.impl.RepeatedVarCharReaderImpl;
>
> public class ProjectorGen0 {
>
>
>     RepeatedVarCharVector vv0;
>     RepeatedVarCharReaderImpl reader4;
>     VarCharVector vv5;
>
>
>     public boolean doEval(int inIndex, int outIndex)
>         throws SchemaChangeException
>
>     {
>         {
>             VarCharHolder out3 = new VarCharHolder();
>
>             complex:
>             vv0 .getAccessor().getReader().setPosition((inIndex));
>
>             reader4 .read(0, out3);
>             BitHolder out8 = new BitHolder();
>
>             out8 .value = 1;
>             if (!vv5 .getMutator().setSafe((outIndex), out3)) {
>
>                 out8 .value = 0;
>             }
>             if (out8 .value == 0) {
>                 return false;
>             }
>
>         }
>         {
>             return true;
>
>         }
>     }
>
>
>     public void doSetup(FragmentContext context, RecordBatch incoming, RecordBatch outgoing)
>         throws SchemaChangeException
>
>     {
>         {
>             int[] fieldIds1 = new int[ 1 ] ;
>
>             fieldIds1 [ 0 ] = 0;
>             Object tmp2 = (incoming).getValueAccessorById(RepeatedVarCharVector.class, fieldIds1).getValueVector();
>             if (tmp2 == null) {
>                 throw new SchemaChangeException("Failure while loading vector vv0 with id: org.apache.drill.exec.record.TypedFieldId@1cf4a5a0.");
>             }
>             vv0 = ((RepeatedVarCharVector) tmp2);
>
>             reader4 = ((RepeatedVarCharReaderImpl) vv0 .getAccessor().getReader());
>             int[] fieldIds6 = new int[ 1 ] ;
>
>             fieldIds6 [ 0 ] = 0;
>             Object tmp7 = (outgoing).getValueAccessorById(VarCharVector.class, fieldIds6).getValueVector();
>
>             if (tmp7 == null) {
>                 throw new SchemaChangeException("Failure while loading vector vv5 with id: org.apache.drill.exec.record.TypedFieldId@1ce776c0.");
>             }
>             vv5 = ((VarCharVector) tmp7);
>
>         }
>     }
>
>
> }
>
> 	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:302)
> 	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
>
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> 	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
> 	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> 	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> 	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> 	at java.lang.Thread.run(Thread.java:744)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 4, Column 8: Imported class "org.apache.drill.exec.exception.SchemaChangeException" could not be loaded
>
> 	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
> 	at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
> 	at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
> 	at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
> 	at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
>
> 	at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
> 	at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
> 	at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
>
> 	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
> 	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
>
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
> 	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> 	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>
> 	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> 	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> 	at java.lang.Thread.run(Thread.java:744)
>
> 2014-06-09T18:30:40,109Z ERROR o.a.d.e.p.i.ScreenCreator$ScreenRoot [zk1] [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0] ErrorHelper:logAndConvertError(ErrorHelper.java:60) - Error 5625373a-729d-4f3f-a262-6f46f2f879d9: Screen received stop request sent.
>
> org.codehaus.commons.compiler.CompileException: Line 4, Column 8: Imported class "org.apache.drill.exec.exception.SchemaChangeException" could not be loaded
> 	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
> 	at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
> 	at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
> 	at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
> 	at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
>
> 	at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
> 	at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
> 	at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
>
> 	at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
> 	at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
>
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> 	at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
> 	at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> 	at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> 	at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>
> 	at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> 	at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> 	at java.lang.Thread.run(Thread.java:744)
>
>

Re: Using janino with Apache Twill

Posted by Peter Grman <pe...@gmail.com>.
Ok thank you, that brings some light into the matter.

I've checked it further and saw that they are creating a new ClassLoader
which is based on the URLClassLoader with an empty array of URLs. This
still seems to work in the normal state and has only a problem in Twill.

For now, I've "fixed" it by manually loading the necessary classes from a
classloader I'd get from any class, which is defined in the same jar
(SchemaChangeException.class.getClassLoader().loadClass(className)). This
can now find the correct classes, since I get exactly the classloader
you've mentioned.

So now, instead of using this dirty hack, I can just remember the thread
context classloader of my runnable thread and set it as the parent.

Thank you very much, I think that will do the trick.


On Tue, Jun 10, 2014 at 1:00 AM, Terence Yim <ch...@gmail.com> wrote:

> Hi Peter,
>
> I am not familiar with how janino works, but it seems to me that it
> may not be using context ClassLoader to load classes or as least the
> thread that is compiling the generated class does not have the context
> ClassLoader set properly.
>
> The way that Twill works is pretty straightforward. It creates a
> "launcher.jar", which has no dependency on any library and start the
> JVM in a YARN container like this:
>
> java -cp launcher.jar ....
>
> Hence the system classloader has no user/library classes, but only the
> Launcher class.
>
> Then in the Launcher.main() method, it creates a URLClassLoader, using
> all the jars + .class files inside the "container.jar" file, to load
> the user TwillRunnable. It also sets it as the context ClassLoader of
> the thread that calls the "run()" method. So, if you want to load
> class manually (through ClassLoader or Class.forName) in a different
> thread than the "run()" thread, you'll have to set the context
> ClassLoader of that thread or explicitly construct the ClassLoader
> with the correct parent ClassLoader.
>
> Terence
>
> On Mon, Jun 9, 2014 at 12:32 PM, Peter Grman <pe...@gmail.com>
> wrote:
> > Hi, I'm currently in the process of porting an Apache project for
> Big-Data
> > analysis (Apache Drill) to YARN. I'm using Twill for that manner and it
> > almost works.
> >
> > The last problem, I can't seem to figure out, is an exception from
> janino.
> > Apache Drill uses the compiler to create classes on the fly and that
> works
> > fine when I run Drill directly. But when run in Apache Twill I get the
> > exception from Janino that a class couldn't be loaded. I've added this
> > specific class as a dependency and also could find it in the container
> > jar-file which was created by Twill.
> >
> > And the class really is there, because the class which can't be found, is
> > the same as the exception which is thrown. - Yes that same class to which
> > the original CompileException is transformed in the end, is the same as
> the
> > class which is imported as first and can't be found. I suspect that there
> > is a problem with how twill loads the jars and that's the reason why
> janino
> > can't find the class - since it works running it directly from command
> line
> > - but I don't know where to look anymore.
> >
> > Any ideas?
> > Thanks for your help
> >
> > The stacktrace from the exception is here:
> > https://gist.github.com/pgrm/d29d33f356601b1a1a3f
> > The class definition which can't be compiled alone is here:
> > https://gist.github.com/pgrm/a589edb64317f5cb1207
> > And my Twill-Application definition can be found here:
> >
> https://github.com/pgrm/incubator-drill/blob/YARN-support/yarn-integration/src/main/java/org/apache/drill/yarn/integration/RunDrill.java
> >
> > And here, once more, the exception trace:
> >
> > 2014-06-09T18:30:40,093Z ERROR o.a.d.e.p.i.p.ProjectRecordBatch [zk1]
> > [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
> > AbstractSingleRecordBatch:next(AbstractSingleRecordBatch.java:60) -
> > Failure during query
> > org.apache.drill.exec.exception.SchemaChangeException: Failure while
> > attempting to load generated class
> >
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:243)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> >
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >         at
> org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >
> >         at
> org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> >         at
> org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> >         at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> >
> >         at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> >         at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> >         at java.lang.Thread.run(Thread.java:744)
> > Caused by: org.apache.drill.exec.exception.ClassTransformationException:
> > Failure generating transformation classes for value:
> >  package org.apache.drill.exec.test.generated;
> >
> >
> > import org.apache.drill.exec.exception.SchemaChangeException;
> > import org.apache.drill.exec.expr.holders.BitHolder;
> > import org.apache.drill.exec.expr.holders.VarCharHolder;
> > import org.apache.drill.exec.ops.FragmentContext;
> > import org.apache.drill.exec.record.RecordBatch;
> > import org.apache.drill.exec.vector.RepeatedVarCharVector;
> > import org.apache.drill.exec.vector.VarCharVector;
> > import
> org.apache.drill.exec.vector.complex.impl.RepeatedVarCharReaderImpl;
> >
> > public class ProjectorGen0 {
> >
> >
> >     RepeatedVarCharVector vv0;
> >     RepeatedVarCharReaderImpl reader4;
> >     VarCharVector vv5;
> >
> >
> >     public boolean doEval(int inIndex, int outIndex)
> >         throws SchemaChangeException
> >
> >     {
> >         {
> >             VarCharHolder out3 = new VarCharHolder();
> >
> >             complex:
> >             vv0 .getAccessor().getReader().setPosition((inIndex));
> >
> >             reader4 .read(0, out3);
> >             BitHolder out8 = new BitHolder();
> >
> >             out8 .value = 1;
> >             if (!vv5 .getMutator().setSafe((outIndex), out3)) {
> >
> >                 out8 .value = 0;
> >             }
> >             if (out8 .value == 0) {
> >                 return false;
> >             }
> >
> >         }
> >         {
> >             return true;
> >
> >         }
> >     }
> >
> >
> >     public void doSetup(FragmentContext context, RecordBatch incoming,
> > RecordBatch outgoing)
> >         throws SchemaChangeException
> >
> >     {
> >         {
> >             int[] fieldIds1 = new int[ 1 ] ;
> >
> >             fieldIds1 [ 0 ] = 0;
> >             Object tmp2 =
> > (incoming).getValueAccessorById(RepeatedVarCharVector.class,
> > fieldIds1).getValueVector();
> >             if (tmp2 == null) {
> >                 throw new SchemaChangeException("Failure while loading
> > vector vv0 with id:
> > org.apache.drill.exec.record.TypedFieldId@1cf4a5a0.");
> >             }
> >             vv0 = ((RepeatedVarCharVector) tmp2);
> >
> >             reader4 = ((RepeatedVarCharReaderImpl) vv0
> > .getAccessor().getReader());
> >             int[] fieldIds6 = new int[ 1 ] ;
> >
> >             fieldIds6 [ 0 ] = 0;
> >             Object tmp7 =
> > (outgoing).getValueAccessorById(VarCharVector.class,
> > fieldIds6).getValueVector();
> >
> >             if (tmp7 == null) {
> >                 throw new SchemaChangeException("Failure while loading
> > vector vv5 with id:
> > org.apache.drill.exec.record.TypedFieldId@1ce776c0.");
> >             }
> >             vv5 = ((VarCharVector) tmp7);
> >
> >         }
> >     }
> >
> >
> > }
> >
> >         at
> org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:302)
> >         at
> org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
> >
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> >
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >         at
> org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >
> >         at
> org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> >         at
> org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> >         at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> >
> >         at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> >         at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> >         at java.lang.Thread.run(Thread.java:744)
> > Caused by: org.codehaus.commons.compiler.CompileException: Line 4,
> > Column 8: Imported class
> > "org.apache.drill.exec.exception.SchemaChangeException" could not be
> > loaded
> >         at
> org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
> >         at
> org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
> >         at
> org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
> >         at
> org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
> >         at
> org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
> >
> >         at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
> >         at
> org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
> >         at
> org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
> >
> >         at
> org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
> >         at
> org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
> >
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >
> >         at
> org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >         at
> org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> >
> >         at
> org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> >         at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> >         at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> >
> >         at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> >         at java.lang.Thread.run(Thread.java:744)
> >
> > 2014-06-09T18:30:40,109Z ERROR o.a.d.e.p.i.ScreenCreator$ScreenRoot
> > [zk1] [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
> > ErrorHelper:logAndConvertError(ErrorHelper.java:60) - Error
> > 5625373a-729d-4f3f-a262-6f46f2f879d9: Screen received stop request
> > sent.
> > org.codehaus.commons.compiler.CompileException: Line 4, Column 8:
> > Imported class "org.apache.drill.exec.exception.SchemaChangeException"
> > could not be loaded
> >         at
> org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
> >         at
> org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
> >         at
> org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
> >         at
> org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
> >         at
> org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
> >
> >         at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
> >         at
> org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
> >         at
> org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
> >
> >         at
> org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
> >         at
> org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
> >
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
> >         at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >
> >         at
> org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
> >         at
> org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
> >         at
> org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
> >
> >         at
> org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
> >         at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
> >         at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> >
> >         at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> >         at java.lang.Thread.run(Thread.java:744)
>

Re: Using janino with Apache Twill

Posted by Terence Yim <ch...@gmail.com>.
Hi Peter,

I am not familiar with how janino works, but it seems to me that it
may not be using context ClassLoader to load classes or at least the
thread that is compiling the generated class does not have the context
ClassLoader set properly.

The way that Twill works is pretty straightforward. It creates a
"launcher.jar", which has no dependency on any library and start the
JVM in a YARN container like this:

java -cp launcher.jar ....

Hence the system classloader has no user/library classes, but only the
Launcher class.

Then in the Launcher.main() method, it creates a URLClassLoader, using
all the jars + .class files inside the "container.jar" file, to load
the user TwillRunnable. It also sets it as the context ClassLoader of
the thread that calls the "run()" method. So, if you want to load
class manually (through ClassLoader or Class.forName) in a different
thread than the "run()" thread, you'll have to set the context
ClassLoader of that thread or explicitly construct the ClassLoader
with the correct parent ClassLoader.

Terence

On Mon, Jun 9, 2014 at 12:32 PM, Peter Grman <pe...@gmail.com> wrote:
> Hi, I'm currently in the process of porting an Apache project for Big-Data
> analysis (Apache Drill) to YARN. I'm using Twill for that manner and it
> almost works.
>
> The last problem, I can't seem to figure out, is an exception from janino.
> Apache Drill uses the compiler to create classes on the fly and that works
> fine when I run Drill directly. But when run in Apache Twill I get the
> exception from Janino that a class couldn't be loaded. I've added this
> specific class as a dependency and also could find it in the container
> jar-file which was created by Twill.
>
> And the class really is there, because the class which can't be found, is
> the same as the exception which is thrown. - Yes that same class to which
> the original CompileException is transformed in the end, is the same as the
> class which is imported as first and can't be found. I suspect that there
> is a problem with how twill loads the jars and that's the reason why janino
> can't find the class - since it works running it directly from command line
> - but I don't know where to look anymore.
>
> Any ideas?
> Thanks for your help
>
> The stacktrace from the exception is here:
> https://gist.github.com/pgrm/d29d33f356601b1a1a3f
> The class definition which can't be compiled alone is here:
> https://gist.github.com/pgrm/a589edb64317f5cb1207
> And my Twill-Application definition can be found here:
> https://github.com/pgrm/incubator-drill/blob/YARN-support/yarn-integration/src/main/java/org/apache/drill/yarn/integration/RunDrill.java
>
> And here, once more, the exception trace:
>
> 2014-06-09T18:30:40,093Z ERROR o.a.d.e.p.i.p.ProjectRecordBatch [zk1]
> [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
> AbstractSingleRecordBatch:next(AbstractSingleRecordBatch.java:60) -
> Failure during query
> org.apache.drill.exec.exception.SchemaChangeException: Failure while
> attempting to load generated class
>
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:243)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>         at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
>         at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>         at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
>         at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:744)
> Caused by: org.apache.drill.exec.exception.ClassTransformationException:
> Failure generating transformation classes for value:
>  package org.apache.drill.exec.test.generated;
>
>
> import org.apache.drill.exec.exception.SchemaChangeException;
> import org.apache.drill.exec.expr.holders.BitHolder;
> import org.apache.drill.exec.expr.holders.VarCharHolder;
> import org.apache.drill.exec.ops.FragmentContext;
> import org.apache.drill.exec.record.RecordBatch;
> import org.apache.drill.exec.vector.RepeatedVarCharVector;
> import org.apache.drill.exec.vector.VarCharVector;
> import org.apache.drill.exec.vector.complex.impl.RepeatedVarCharReaderImpl;
>
> public class ProjectorGen0 {
>
>
>     RepeatedVarCharVector vv0;
>     RepeatedVarCharReaderImpl reader4;
>     VarCharVector vv5;
>
>
>     public boolean doEval(int inIndex, int outIndex)
>         throws SchemaChangeException
>
>     {
>         {
>             VarCharHolder out3 = new VarCharHolder();
>
>             complex:
>             vv0 .getAccessor().getReader().setPosition((inIndex));
>
>             reader4 .read(0, out3);
>             BitHolder out8 = new BitHolder();
>
>             out8 .value = 1;
>             if (!vv5 .getMutator().setSafe((outIndex), out3)) {
>
>                 out8 .value = 0;
>             }
>             if (out8 .value == 0) {
>                 return false;
>             }
>
>         }
>         {
>             return true;
>
>         }
>     }
>
>
>     public void doSetup(FragmentContext context, RecordBatch incoming,
> RecordBatch outgoing)
>         throws SchemaChangeException
>
>     {
>         {
>             int[] fieldIds1 = new int[ 1 ] ;
>
>             fieldIds1 [ 0 ] = 0;
>             Object tmp2 =
> (incoming).getValueAccessorById(RepeatedVarCharVector.class,
> fieldIds1).getValueVector();
>             if (tmp2 == null) {
>                 throw new SchemaChangeException("Failure while loading
> vector vv0 with id:
> org.apache.drill.exec.record.TypedFieldId@1cf4a5a0.");
>             }
>             vv0 = ((RepeatedVarCharVector) tmp2);
>
>             reader4 = ((RepeatedVarCharReaderImpl) vv0
> .getAccessor().getReader());
>             int[] fieldIds6 = new int[ 1 ] ;
>
>             fieldIds6 [ 0 ] = 0;
>             Object tmp7 =
> (outgoing).getValueAccessorById(VarCharVector.class,
> fieldIds6).getValueVector();
>
>             if (tmp7 == null) {
>                 throw new SchemaChangeException("Failure while loading
> vector vv5 with id:
> org.apache.drill.exec.record.TypedFieldId@1ce776c0.");
>             }
>             vv5 = ((VarCharVector) tmp7);
>
>         }
>     }
>
>
> }
>
>         at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:302)
>         at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
>
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>         at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
>         at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>         at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
>         at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:744)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 4,
> Column 8: Imported class
> "org.apache.drill.exec.exception.SchemaChangeException" could not be
> loaded
>         at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
>         at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
>         at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
>         at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
>         at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
>
>         at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
>         at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
>         at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
>
>         at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
>         at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
>
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
>         at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>         at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>
>         at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
>         at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:744)
>
> 2014-06-09T18:30:40,109Z ERROR o.a.d.e.p.i.ScreenCreator$ScreenRoot
> [zk1] [37daf04b-7d82-4d2f-987c-59851f2aeafe:frag:0:0]
> ErrorHelper:logAndConvertError(ErrorHelper.java:60) - Error
> 5625373a-729d-4f3f-a262-6f46f2f879d9: Screen received stop request
> sent.
> org.codehaus.commons.compiler.CompileException: Line 4, Column 8:
> Imported class "org.apache.drill.exec.exception.SchemaChangeException"
> could not be loaded
>         at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:9014)
>         at org.codehaus.janino.UnitCompiler.import2(UnitCompiler.java:192)
>         at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:104)
>         at org.codehaus.janino.UnitCompiler$1.visitSingleTypeImportDeclaration(UnitCompiler.java:166)
>         at org.codehaus.janino.Java$CompilationUnit$SingleTypeImportDeclaration.accept(Java.java:171)
>
>         at org.codehaus.janino.UnitCompiler.<init>(UnitCompiler.java:164)
>         at org.apache.drill.exec.compile.JaninoClassCompiler.getClassByteCode(JaninoClassCompiler.java:53)
>         at org.apache.drill.exec.compile.QueryClassLoader.getClassByteCode(QueryClassLoader.java:69)
>
>         at org.apache.drill.exec.compile.ClassTransformer.getImplementationClass(ClassTransformer.java:256)
>         at org.apache.drill.exec.ops.FragmentContext.getImplementationClass(FragmentContext.java:185)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setupNewSchema(ProjectRecordBatch.java:240)
>
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:57)
>         at org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.next(ProjectRecordBatch.java:83)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>
>         at org.apache.drill.exec.physical.impl.limit.LimitRecordBatch.next(LimitRecordBatch.java:99)
>         at org.apache.drill.exec.record.AbstractSingleRecordBatch.next(AbstractSingleRecordBatch.java:45)
>         at org.apache.drill.exec.physical.impl.svremover.RemovingRecordBatch.next(RemovingRecordBatch.java:94)
>
>         at org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.next(ScreenCreator.java:80)
>         at org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:104)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:744)