Posted to issues@spark.apache.org by "Igor Tkachenko (JIRA)" <ji...@apache.org> on 2014/10/01 17:26:33 UTC

[jira] [Updated] (SPARK-3761) Class not found exception / sbt 13.5 / Scala 2.10.4

     [ https://issues.apache.org/jira/browse/SPARK-3761?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Igor Tkachenko updated SPARK-3761:
----------------------------------
    Description: 
I have the following Scala code:

val master = "spark://<server address>:7077"

val sc = new SparkContext(new SparkConf()
  .setMaster(master)
  .setAppName("SparkQueryDemo 01")
  .set("spark.executor.memory", "512m"))

val count2 = sc
  .textFile("hdfs://<server address>:8020/tmp/data/risk/account.txt")
  .filter(line => line.contains("Word"))
  .count()

I get the following error:

[error] (run-main-0) org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 failed 4 times,
most recent failure: Exception failure in TID 6 on host <server address>: java.lang.ClassNotFoundException: SimpleApp$$anonfun$1
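
For context, SimpleApp$$anonfun$1 is the class the Scala compiler generates for the filter closure, so this error usually means the executors never received the application's compiled classes. A minimal sketch of one common remedy, shipping the packaged JAR via SparkConf.setJars; the JAR path below is a hypothetical example ("sbt package" prints the real one):

import org.apache.spark.{SparkConf, SparkContext}

object SimpleApp {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("spark://<server address>:7077")
      .setAppName("SparkQueryDemo 01")
      .set("spark.executor.memory", "512m")
      // Ship the application JAR so executors can load the
      // compiled closure classes such as SimpleApp$$anonfun$1.
      .setJars(Seq("target/scala-2.10/sparkquerydemo_2.10-1.0.jar")) // hypothetical path
    val sc = new SparkContext(conf)

    val count2 = sc
      .textFile("hdfs://<server address>:8020/tmp/data/risk/account.txt")
      .filter(line => line.contains("Word"))
      .count()
    println(s"count2 = $count2")
    sc.stop()
  }
}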

My dependencies:

object Version {
  val spark        = "1.0.0-cdh5.1.0"
  val hadoop       = "2.4.1"
  val slf4j        = "1.7.6"
  val logback      = "1.1.1"
  val scalaTest    = "2.1.0"
  val mockito      = "1.9.5"
}

object Library {
  val sparkCore      = "org.apache.spark"  %% "spark-assembly"  % Version.spark
  val hadoopClient   = "org.apache.hadoop" %  "hadoop-client"   % Version.hadoop
  val slf4jApi       = "org.slf4j"         %  "slf4j-api"       % Version.slf4j
  val logbackClassic = "ch.qos.logback"    %  "logback-classic" % Version.logback
  val scalaTest      = "org.scalatest"     %% "scalatest"       % Version.scalaTest
  val mockitoAll     = "org.mockito"       %  "mockito-all"     % Version.mockito
}
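
These two objects follow the usual sbt pattern of keeping versions and module IDs in project/Dependencies.scala. A minimal sketch of how they might be wired into build.sbt; the Cloudera resolver URL is an assumption needed for the CDH-versioned Spark artifact:

scalaVersion := "2.10.4"

// CDH-versioned artifacts are not on Maven Central (assumed resolver).
resolvers += "cloudera" at "https://repository.cloudera.com/artifactory/cloudera-repos/"

libraryDependencies ++= Seq(
  Library.sparkCore,
  Library.hadoopClient,
  Library.slf4jApi,
  Library.logbackClassic,
  Library.scalaTest % "test",
  Library.mockitoAll % "test"
)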

My OS is Windows 7.

  was:
I have the following Scala code:

val master = "spark://<server address>:7077"

val sc = new SparkContext(new SparkConf()
  .setMaster(master)
  .setAppName("SparkQueryDemo 01")
  .set("spark.executor.memory", "512m"))

val count2 = sc
  .textFile("hdfs://<server address>:8020/tmp/data/risk/account.txt")
  .filter(line => line.contains("Barclays"))
  .count()

I get the following error:

[error] (run-main-0) org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 failed 4 times,
most recent failure: Exception failure in TID 6 on host <server address>: java.lang.ClassNotFoundException: SimpleApp$$anonfun$1

My dependencies:

object Version {
  val spark        = "1.0.0-cdh5.1.0"
  val hadoop       = "2.4.1"
  val slf4j        = "1.7.6"
  val logback      = "1.1.1"
  val scalaTest    = "2.1.0"
  val mockito      = "1.9.5"
}

object Library {
  val sparkCore      = "org.apache.spark"  %% "spark-assembly"  % Version.spark
  val hadoopClient   = "org.apache.hadoop" %  "hadoop-client"   % Version.hadoop
  val slf4jApi       = "org.slf4j"         %  "slf4j-api"       % Version.slf4j
  val logbackClassic = "ch.qos.logback"    %  "logback-classic" % Version.logback
  val scalaTest      = "org.scalatest"     %% "scalatest"       % Version.scalaTest
  val mockitoAll     = "org.mockito"       %  "mockito-all"     % Version.mockito
}

My OS is Windows 7.


> Class not found exception / sbt 13.5 / Scala 2.10.4
> ---------------------------------------------------
>
>                 Key: SPARK-3761
>                 URL: https://issues.apache.org/jira/browse/SPARK-3761
>             Project: Spark
>          Issue Type: Bug
>    Affects Versions: 1.0.0
>            Reporter: Igor Tkachenko
>


