Posted to issues@spark.apache.org by "吴志龙 (JIRA)" <ji...@apache.org> on 2017/02/08 09:21:41 UTC

[jira] [Created] (SPARK-19510) org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 44, Column 42: A method named "evaluate" is not declared in any enclosing class nor any supertype, nor through a static import

吴志龙 created SPARK-19510:
---------------------------

             Summary: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 44, Column 42: A method named "evaluate" is not declared in any enclosing class nor any supertype, nor through a static import
                 Key: SPARK-19510
                 URL: https://issues.apache.org/jira/browse/SPARK-19510
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 2.1.0
         Environment: spark 2.1 on yarn client
hadoop 2.6.0
jdk 1.7

            Reporter: 吴志龙


How to reproduce: add a custom function to Spark SQL as follows (the beeline
test below exercises the analogous fql_base64_encode, built the same way).

1. Add Fql_Base64_Decode.scala (see the note after this listing):
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{ByteArray, UTF8String}
import org.apache.spark.sql.catalyst.expressions.codegen._
import java.net.URLDecoder
/**
 * Base64_Decode decoding
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(Base64) fql_base64_decode",
  extended = """
    Examples:
      > SELECT _FUNC_('VlVdQ0hTV1VFSFRVXkZB');
      return 201607290000239
  """)
case class Fql_Base64_Decode(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType)

  protected override def nullSafeEval(string: Any): Any = {
    val base64 = string.asInstanceOf[UTF8String]
    org.apache.spark.sql.catalyst.expressions.FqlBase64Decode.evaluate(base64.toString())
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
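    // nullSafeCodeGen passes the *name* of the generated Java variable that
    // holds the child's value; the s-string below is emitted verbatim into
    // the generated class as Java source.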
    nullSafeCodeGen(ctx, ev, (child) => {
      s"""
         ${ev.value} = Fql_Base64_Decode().evaluate(child.toString());
       """})
  }
  
}
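
The compile failure below comes from this doGenCode: the s-string is pasted
verbatim into the generated Java class, where neither an evaluate method nor a
variable named child is in scope (the lambda parameter child is a String
holding the name of a generated Java variable, and it is never interpolated).
A minimal corrected sketch, assuming FqlBase64Decode is a top-level Scala
object whose evaluate(String): String does the decoding (as nullSafeEval
already assumes), so scalac emits a static forwarder callable from generated
Java:

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, c => {
      // $c interpolates the name of the Java variable holding the child's
      // UTF8String value; the helper is called through its fully qualified
      // name so the generated class can resolve it.
      s"""
         ${ev.value} = org.apache.spark.unsafe.types.UTF8String.fromString(
           org.apache.spark.sql.catalyst.expressions.FqlBase64Decode.evaluate($c.toString()));
       """
    })
  }

For the same reason, if evaluate returns a java.lang.String, nullSafeEval
should wrap its result in UTF8String.fromString too, since StringType values
are represented internally as UTF8String.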

2. Register the function name in org.apache.spark.sql.catalyst.analysis.FunctionRegistry (context sketched after the entry):

expression[Fql_Base64_Decode]("fql_base64_decode")
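
For reference, the entry sits in the expressions map in FunctionRegistry.scala
next to the existing base64 built-ins; a sketch of the surrounding Spark 2.1
context (only the new line is added, the rest already exists):

  val expressions: Map[String, (ExpressionInfo, FunctionBuilder)] = Map(
    // ... existing built-ins ...
    expression[Base64]("base64"),
    expression[UnBase64]("unbase64"),
    // the new entry:
    expression[Fql_Base64_Decode]("fql_base64_decode")
  )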

3. Add the function to org.apache.spark.sql.functions.scala (usage sketch follows):
def fql_base64_decode(e: Column): Column = withExpr { Fql_Base64_Decode(e.expr) }
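
With that in place the function is also callable from the DataFrame API; a
minimal usage sketch, assuming a SparkSession named spark (table and column
names are taken from the test below):

  import org.apache.spark.sql.functions.{col, fql_base64_decode}

  val df = spark.table("dp_tmp.test_wuzl")
  df.select(fql_base64_decode(col("code"))).show()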

4. Compile and package:
 mvn -Pyarn -Phadoop-2.6 -Dhadoop.version=2.6.0 -Phadoop-provided -Phive -Phive-thriftserver -Pnetlib-lgpl -DskipTests clean package
5. Replace spark-catalyst_2.11-2.1.0.jar with the rebuilt jar.
6. Start the Thrift server.
7. Connect with beeline.
8. Test the Spark SQL functions:
0: jdbc:hive2://6.hadoop.com:10008> select fql_base64_encode('201607290000239');
+-------------------------------------+--+
| fql_base64_encode(201607290000239)  |
+-------------------------------------+--+
| VlVdQ0hTV1VFSFRVXkZB                |
+-------------------------------------+--+
1 row selected (0.11 seconds)
0: jdbc:hive2://6.hadoop.com:10008> 
0: jdbc:hive2://6.hadoop.com:10008> 
0: jdbc:hive2://6.hadoop.com:10008> select * from dp_tmp.test_wuzl;
+-----------------------+--+
|         code          |
+-----------------------+--+
| VlVdQ0hTV1VFSFRVXkZB  |
+-----------------------+--+
1 row selected (0.383 seconds)
0: jdbc:hive2://6.hadoop.com:10008> select fql_base64_encode(code) from dp_tmp.test_wuzl; 
Error: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 7.0 failed 10 times, most recent failure: Lost task 0.9 in stage 7.0 (TID 54, 9.hadoop.com, executor 3): java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 44, Column 42: A method named "evaluate" is not declared in any enclosing class nor any supertype, nor through a static import
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */   return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */   private Object[] references;
/* 008 */   private UnsafeRow result;
/* 009 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 010 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 011 */
/* 012 */   public SpecificUnsafeProjection(Object[] references) {
/* 013 */     this.references = references;
/* 014 */     result = new UnsafeRow(1);
/* 015 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 016 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 017 */
/* 018 */   }
/* 019 */
/* 020 */   public void initialize(int partitionIndex) {
/* 021 */
/* 022 */   }
/* 023 */
/* 024 */
/* 025 */
/* 026 */   // Scala.Function1 need this
/* 027 */   public java.lang.Object apply(java.lang.Object row) {
/* 028 */     return apply((InternalRow) row);
/* 029 */   }
/* 030 */
/* 031 */   public UnsafeRow apply(InternalRow i) {
/* 032 */     holder.reset();
/* 033 */
/* 034 */     rowWriter.zeroOutNullBytes();
/* 035 */
/* 036 */
/* 037 */     boolean isNull1 = i.isNullAt(0);
/* 038 */     UTF8String value1 = isNull1 ? null : (i.getUTF8String(0));
/* 039 */     boolean isNull = isNull1;
/* 040 */     UTF8String value = null;
/* 041 */
/* 042 */     if (!isNull1) {
/* 043 */
/* 044 */       value = new Fql_Base64_Encode().evaluate(child.toString());
/* 045 */
/* 046 */     }
/* 047 */     if (isNull) {
/* 048 */       rowWriter.setNullAt(0);
/* 049 */     } else {
/* 050 */       rowWriter.write(0, value);
/* 051 */     }
/* 052 */     result.setTotalSize(holder.totalSize());
/* 053 */     return result;
/* 054 */   }
/* 055 */ }
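
Two things stand out in the trace above. Line 44 of the generated class is the
string emitted by doGenCode (here the fql_base64_encode variant) pasted in
verbatim: the class declares no evaluate method and has no variable named
child, which is exactly what Janino reports. The earlier literal query
succeeds only because a foldable expression is evaluated on the driver through
the interpreted nullSafeEval path; selecting over a table column goes through
generated UnsafeProjection code and hits the broken snippet.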




--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
