Posted to user@spark.apache.org by minajagi <ch...@jpmorgan.com> on 2014/12/09 23:07:21 UTC

spark shell and hive context problem

Hi, I'm working with the Spark that ships with CDH 5.2.0.

I'm trying to create a HiveContext in the shell, and I'm running into
problems that I don't understand.

I have added hive-site.xml to the conf folder at /usr/lib/spark/conf,
as suggested elsewhere.
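
For reference, a minimal hive-site.xml for this only needs to point at
the Hive metastore, something like the following (placeholder host):

  <configuration>
    <property>
      <!-- placeholder host: use your metastore's real thrift URI -->
      <name>hive.metastore.uris</name>
      <value>thrift://metastore-host:9083</value>
    </property>
  </configuration>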


Here is what I see. Please help.

---------------------------------------------------------------------------------------------------
scala> import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.HiveContext

scala> val hiveCtx = new org.apache.spark.sql.hive.HiveContext(sc)
error: bad symbolic reference. A signature in HiveContext.class refers to
term hive
in package org.apache.hadoop which is not available.
It may be completely missing from the current classpath, or the version on
the classpath might be incompatible with the version used when compiling
HiveContext.class.
error: 
     while compiling: <console>
        during phase: erasure
     library version: version 2.10.4
    compiler version: version 2.10.4
  reconstructed args: 

  last tree to typer: Apply(value $outer)
              symbol: value $outer (flags: <method> <synthetic> <stable>
<expandedname> <triedcooking>)
   symbol definition: val $outer(): $iwC.$iwC.type
                 tpe: $iwC.$iwC.type
       symbol owners: value $outer -> class $iwC -> class $iwC -> class $iwC
-> class $read -> package $line9
      context owners: class $iwC -> class $iwC -> class $iwC -> class $iwC
-> class $read -> package $line9

== Enclosing template or block ==

ClassDef( // class $iwC extends Serializable
  0
  "$iwC"
  []
  Template( // val <local $iwC>: <notype>, tree.tpe=$iwC
    "java.lang.Object", "scala.Serializable" // parents
    ValDef(
      private
      "_"
      <tpt>
      <empty>
    )
    // 5 statements
    DefDef( // def <init>(arg$outer: $iwC.$iwC.$iwC.type): $iwC
      <method> <triedcooking>
      "<init>"
      []
      // 1 parameter list
      ValDef( // $outer: $iwC.$iwC.$iwC.type
        
        "$outer"
        <tpt> // tree.tpe=$iwC.$iwC.$iwC.type
        <empty>
      )
      <tpt> // tree.tpe=$iwC
      Block( // tree.tpe=Unit
        Apply( // def <init>(): Object in class Object, tree.tpe=Object
          $iwC.super."<init>" // def <init>(): Object in class Object,
tree.tpe=()Object
          Nil
        )
        ()
      )
    )
    ValDef( // private[this] val hiveCtx:
org.apache.spark.sql.hive.HiveContext
      private <local> <triedcooking>
      "hiveCtx "
      <tpt> // tree.tpe=org.apache.spark.sql.hive.HiveContext
      Apply( // def <init>(sc: org.apache.spark.SparkContext):
org.apache.spark.sql.hive.HiveContext in class HiveContext,
tree.tpe=org.apache.spark.sql.hive.HiveContext
        new org.apache.spark.sql.hive.HiveContext."<init>" // def <init>(sc:
org.apache.spark.SparkContext): org.apache.spark.sql.hive.HiveContext in
class HiveContext, tree.tpe=(sc:
org.apache.spark.SparkContext)org.apache.spark.sql.hive.HiveContext
        Apply( // val sc(): org.apache.spark.SparkContext,
tree.tpe=org.apache.spark.SparkContext
         
$iwC.this.$line9$$read$$iwC$$iwC$$iwC$$iwC$$$outer().$line9$$read$$iwC$$iwC$$iwC$$$outer().$line9$$read$$iwC$$iwC$$$outer().$VAL1().$iw().$iw()."sc"
// val sc(): org.apache.spark.SparkContext,
tree.tpe=()org.apache.spark.SparkContext
          Nil
        )
      )
    )
    DefDef( // val hiveCtx(): org.apache.spark.sql.hive.HiveContext
      <method> <stable> <accessor>
      "hiveCtx"
      []
      List(Nil)
      <tpt> // tree.tpe=org.apache.spark.sql.hive.HiveContext
      $iwC.this."hiveCtx " // private[this] val hiveCtx:
org.apache.spark.sql.hive.HiveContext,
tree.tpe=org.apache.spark.sql.hive.HiveContext
    )
    ValDef( // protected val $outer: $iwC.$iwC.$iwC.type
      protected <synthetic> <paramaccessor> <triedcooking>
      "$outer "
      <tpt> // tree.tpe=$iwC.$iwC.$iwC.type
      <empty>
    )
    DefDef( // val $outer(): $iwC.$iwC.$iwC.type
      <method> <synthetic> <stable> <expandedname> <triedcooking>
      "$line9$$read$$iwC$$iwC$$iwC$$iwC$$$outer"
      []
      List(Nil)
      <tpt> // tree.tpe=Any
      $iwC.this."$outer " // protected val $outer: $iwC.$iwC.$iwC.type,
tree.tpe=$iwC.$iwC.$iwC.type
    )
  )
)

== Expanded type of tree ==

ThisType(class $iwC)

uncaught exception during compilation:
scala.reflect.internal.Types$TypeError
scala.reflect.internal.Types$TypeError: bad symbolic reference. A signature
in HiveContext.class refers to term conf
in value org.apache.hadoop.hive which is not available.
It may be completely missing from the current classpath, or the version on
the classpath might be incompatible with the version used when compiling
HiveContext.class.
That entry seems to have slain the compiler.  Shall I replay
your session? I can re-run each line except the last one.
[y/n]




--
View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/spark-shell-and-hive-context-problem-tp20597.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.


Re: spark shell and hive context problem

Posted by Marcelo Vanzin <va...@cloudera.com>.
Hello,

In CDH 5.2 you need to manually add the Hive classes to the classpath
of your Spark job if you want to use the Hive integration. Also, be
aware that Spark 1.1 doesn't really support the version of Hive
shipped with CDH 5.2, so this combination should be considered
extremely experimental.
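
Untested here, but something along these lines should put the Hive
classes on both the driver and the executor classpaths. The jar
location is an assumption for a package-based install; a parcel
install keeps them under the parcel directory instead:

  # Build a colon-separated list of the CDH Hive jars
  # (the /usr/lib/hive/lib path is an assumption; adjust for your layout)
  export SPARK_CLASSPATH=$(echo /usr/lib/hive/lib/*.jar | tr ' ' ':')
  spark-shell

SPARK_CLASSPATH is deprecated, but in Spark 1.1 it is still honored
and applies to both the driver and the executors; --driver-class-path
would cover the driver alone. Once the classes resolve, the original
session should get past the constructor:

scala> val hiveCtx = new org.apache.spark.sql.hive.HiveContext(sc)
scala> hiveCtx.sql("SHOW TABLES").collect().foreach(println)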

On Tue, Dec 9, 2014 at 2:07 PM, minajagi <ch...@jpmorgan.com> wrote:
> Hi, I'm working with the Spark that ships with CDH 5.2.0.
> [...]



-- 
Marcelo
