You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@carbondata.apache.org by Liang Chen <ch...@apache.org> on 2023/04/06 20:00:21 UTC

Re: Error while creating table

The user group is user@carbondata.apache.org

Regards

Xinyu Zeng <xz...@gmail.com> 于2022年4月25日周一 11:13写道:

> Hi,
>
> Since there is no user group, I am using this email list to ask
> questions. Please let me know if there are other platforms for users
> to discuss.
>
> I am new to CarbonData and am following the quick start guide. On
> Ubuntu 20.04, I installed spark-3.1.1-bin-hadoop2.7.tgz and
> apache-carbondata-2.3.0-bin-spark3.1.1-hadoop2.7.2.jar. By using
> SparkSQL CLI, I got an error message while following the quick start
> guide (at the end of this email). Could someone give me some help?
> Thanks!
>
> Shawn
>
> java.lang.IncompatibleClassChangeError: class
> org.apache.spark.sql.hive.CarbonRelation has interface
> org.apache.spark.sql.catalyst.plans.logical.LeafNode as super class
>         at java.base/java.lang.ClassLoader.defineClass1(Native Method)
>         at
> java.base/java.lang.ClassLoader.defineClass(ClassLoader.java:1017)
>         at
> java.base/java.security.SecureClassLoader.defineClass(SecureClassLoader.java:174)
>         at java.base/java.net.URLClassLoader.defineClass(URLClassLoader.java:555)
>         at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:458)
>         at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:452)
>         at java.base/java.security.AccessController.doPrivileged(Native
> Method)
>         at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:451)
>         at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:589)
>         at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>         at
> org.apache.spark.sql.hive.CarbonMetaStoreFactory$.createCarbonMetaStore(CarbonMetaStore.scala:189)
>         at org.apache.spark.sql.CarbonEnv.init(CarbonEnv.scala:137)
>         at org.apache.spark.sql.CarbonEnv$.getInstance(CarbonEnv.scala:176)
>         at
> org.apache.spark.sql.parser.CarbonExtensionSqlParser.parsePlan(CarbonExtensionSqlParser.scala:44)
>         at
> org.apache.spark.sql.SparkSession.$anonfun$sql$2(SparkSession.scala:616)
>         at
> org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
>         at
> org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:616)
>         at
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
>         at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
>         at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
>         at scala.collection.Iterator.foreach(Iterator.scala:943)
>         at scala.collection.Iterator.foreach$(Iterator.scala:943)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
>         at scala.collection.IterableLike.foreach(IterableLike.scala:74)
>         at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:287)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
>         at
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
> Method)
>         at
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at
> java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>         at
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>         at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
>         at
> org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
>         at
> org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
>         at
> org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
>         at
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
>         at
> org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> java.lang.IncompatibleClassChangeError: class
> org.apache.spark.sql.hive.CarbonRelation has interface
> org.apache.spark.sql.catalyst.plans.logical.LeafNode as super class
>         at java.base/java.lang.ClassLoader.defineClass1(Native Method)
>         at
> java.base/java.lang.ClassLoader.defineClass(ClassLoader.java:1017)
>         at
> java.base/java.security.SecureClassLoader.defineClass(SecureClassLoader.java:174)
>         at java.base/java.net.URLClassLoader.defineClass(URLClassLoader.java:555)
>         at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:458)
>         at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:452)
>         at java.base/java.security.AccessController.doPrivileged(Native
> Method)
>         at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:451)
>         at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:589)
>         at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>         at
> org.apache.spark.sql.hive.CarbonMetaStoreFactory$.createCarbonMetaStore(CarbonMetaStore.scala:189)
>         at org.apache.spark.sql.CarbonEnv.init(CarbonEnv.scala:137)
>         at org.apache.spark.sql.CarbonEnv$.getInstance(CarbonEnv.scala:176)
>         at
> org.apache.spark.sql.parser.CarbonExtensionSqlParser.parsePlan(CarbonExtensionSqlParser.scala:44)
>         at
> org.apache.spark.sql.SparkSession.$anonfun$sql$2(SparkSession.scala:616)
>         at
> org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
>         at
> org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:616)
>         at
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
>         at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
>         at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
>         at scala.collection.Iterator.foreach(Iterator.scala:943)
>         at scala.collection.Iterator.foreach$(Iterator.scala:943)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
>         at scala.collection.IterableLike.foreach(IterableLike.scala:74)
>         at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:287)
>         at
> org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
>         at
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
> Method)
>         at
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at
> java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>         at
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>         at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
>         at
> org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
>         at
> org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
>         at
> org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
>         at
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
>         at
> org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>