Posted to user@spark.apache.org by Bonsen <he...@126.com> on 2016/01/04 04:26:08 UTC

sql:Exception in thread "main" scala.MatchError: StringType

(sbt) scala:
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql
object SimpleApp {
  def main(args: Array[String]) {
    val conf = new SparkConf()
    conf.setAppName("mytest").setMaster("spark://Master:7077")
    val sc = new SparkContext(conf)
    val sqlContext = new sql.SQLContext(sc)
    val d = sqlContext.read.json("/home/hadoop/2015data_test/Data/Data/100808cb11e9898816ef15fcdde4e1d74cbc0b/Db6Jh2XeQ.json")
    sc.stop()
  }
}
______________________________________________________________________________________________
after sbt package:
./spark-submit --class "SimpleApp" \
  /home/hadoop/Downloads/sbt/bin/target/scala-2.10/simple-project_2.10-1.0.jar
_______________________________________________________________________________________________
JSON file:
{
    "programmers": [
        {
            "firstName": "Brett",
            "lastName": "McLaughlin",
            "email": "aaaa"
        },
        {
            "firstName": "Jason",
            "lastName": "Hunter",
            "email": "bbbb"
        },
        {
            "firstName": "Elliotte",
            "lastName": "Harold",
            "email": "cccc"
        }
    ],
    "authors": [
        {
            "firstName": "Isaac",
            "lastName": "Asimov",
            "genre": "sciencefiction"
        },
        {
            "firstName": "Tad",
            "lastName": "Williams",
            "genre": "fantasy"
        },
        {
            "firstName": "Frank",
            "lastName": "Peretti",
            "genre": "christianfiction"
        }
    ],
    "musicians": [
        {
            "firstName": "Eric",
            "lastName": "Clapton",
            "instrument": "guitar"
        },
        {
            "firstName": "Sergei",
            "lastName": "Rachmaninoff",
            "instrument": "piano"
        }
    ]
}
_______________________________________________________________________________________________
Exception in thread "main" scala.MatchError: StringType (of class org.apache.spark.sql.types.StringType$)
	at org.apache.spark.sql.json.InferSchema$.apply(InferSchema.scala:58)
	at org.apache.spark.sql.json.JSONRelation$$anonfun$schema$1.apply(JSONRelation.scala:139)
_______________________________________________________________________________________________
Why does this happen?



--
View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/sql-Exception-in-thread-main-scala-MatchError-StringType-tp25868.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.

---------------------------------------------------------------------
To unsubscribe, e-mail: user-unsubscribe@spark.apache.org
For additional commands, e-mail: user-help@spark.apache.org


Re: sql:Exception in thread "main" scala.MatchError: StringType

Posted by Jeff Zhang <zj...@gmail.com>.
Spark's JSON datasource only supports one JSON object per line, so a pretty-printed
document that spans multiple lines will not parse. You need to reformat your file.
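
For example, each record could be written as a complete object on its own line
(e.g. {"firstName": "Brett", "lastName": "McLaughlin", "email": "aaaa"}). If you
would rather keep the file as it is, a rough sketch of a workaround with the same
Spark 1.x APIs you are already using is to load the whole file as one string via
wholeTextFiles and hand that RDD[String] to read.json, which parses each element
as a complete JSON document (the path below is a placeholder, not your original one):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object SimpleApp {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("mytest").setMaster("spark://Master:7077")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // wholeTextFiles yields (path, fileContent) pairs; keep only the content,
    // so the whole multi-line document becomes a single RDD element.
    val rawJson = sc.wholeTextFiles("/path/to/your.json").map(_._2)  // placeholder path

    // read.json(RDD[String]) parses each element as one JSON document,
    // so the pretty-printed layout no longer matters.
    val d = sqlContext.read.json(rawJson)
    d.printSchema()

    sc.stop()
  }
}

Note that this pulls each file into a single string, so it is only suitable for
files that fit comfortably in memory on one executor.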

On Mon, Jan 4, 2016 at 11:26 AM, Bonsen <he...@126.com> wrote:

> Exception in thread "main" scala.MatchError: StringType (of class org.apache.spark.sql.types.StringType$)
>         at org.apache.spark.sql.json.InferSchema$.apply(InferSchema.scala:58)
>         at org.apache.spark.sql.json.JSONRelation$$anonfun$schema$1.apply(JSONRelation.scala:139)
>


-- 
Best Regards

Jeff Zhang