You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@spark.apache.org by nancy henry <na...@gmail.com> on 2017/03/08 02:25:52 UTC

made spark job to throw exception still going under finished succeeded status in yarn

Hi Team,

Wrote below code to throw exception.. How to make below code to throw
exception and make the job to goto failed status in yarn if under some
condition but still close spark context and release resources ..


object Demo {
  def main(args: Array[String]) = {

    var a = 0; var c = 0;

    val sparkConf = new SparkConf()
    val sc = new SparkContext(sparkConf)

    val hiveSqlContext: HiveContext = new
org.apache.spark.sql.hive.HiveContext(sc)
    for (a <- 0 to args.length - 1) {
      val query = sc.textFile(args(a)).collect.filter(query =>
!query.contains("--")).mkString(" ")
      var queryarray = query.split(";")
      var b = query.split(";").length

      var querystatuscheck = true;
      for (c <- 0 to b - 1) {

        if (querystatuscheck) {

          if (!(StringUtils.isBlank(queryarray(c)))) {

            *val querystatus = Try { hiveSqlContext.sql(queryarray(c)) }*



            var b = c + 1
            querystatuscheck = querystatus.isSuccess
            System.out.println("Your" + b + "query status is " +
querystatus)
            System.out.println("querystatuschecktostring is" +
querystatuscheck.toString())
            querystatuscheck.toString() match {
              case "false" => {

              *  throw querystatus.failed.get*
                System.out.println("case true executed")
                sc.stop()
              }
              case _ => {
                sc.stop()
                System.out.println("case default executed")
              }

            }

          }

        }
      }

      System.out.println("Okay")

    }

  }

}