You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user-zh@flink.apache.org by norman <no...@cisco.com.INVALID> on 2020/11/17 06:22:46 UTC
java.lang.IncompatibleClassChangeError: Implementing class (using
blink-planner)
Issue when integrating with Hive 2.1.1
Exception in thread "main" java.lang.IncompatibleClassChangeError:
Implementing class
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at
org.apache.flink.table.planner.delegation.PlannerBase.<init>(PlannerBase.scala:112)
at
org.apache.flink.table.planner.delegation.StreamPlanner.<init>(StreamPlanner.scala:48)
at
org.apache.flink.table.planner.delegation.BlinkPlannerFactory.create(BlinkPlannerFactory.java:50)
at
org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl$.create(StreamTableEnvironmentImpl.scala:289)
at
org.apache.flink.table.api.bridge.scala.StreamTableEnvironment$.create(StreamTableEnvironment.scala:462)
The code is straightforward:
val bs =
EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
val tEnv = StreamTableEnvironment.create(env, bs)
tEnv.registerCatalog(catalog, hive)
tEnv.useCatalog(catalog)
tEnv.executeSql(
"""SET table.sql-dialect=hive;
|CREATE TABLE wap_nohe_2 (
| user_id STRING,
| order_amount DOUBLE
|) PARTITIONED BY (dt STRING, hr STRING) STORED AS parquet
TBLPROPERTIES (
| 'partition.time-extractor.timestamp-pattern'='$dt $hr:00:00',
| 'sink.partition-commit.trigger'='partition-time',
| 'sink.partition-commit.delay'='1 h',
| 'sink.partition-commit.policy.kind'='metastore,success-file'
|);
|""".stripMargin)
build.sbt is as follows:
"org.apache.flink" %% "flink-table-planner-blink" % flinkVersion %
"provided",
//"org.apache.flink" %% "flink-table-runtime-blink" % flinkVersion %
"provided",
"org.apache.flink" %% "flink-table-api-scala-bridge" % flinkVersion %
"provided",
//"org.apache.flink" %% "flink-table-api-java-bridge" % flinkVersion %
"provided",
"org.apache.flink" %% "flink-connector-jdbc" % flinkVersion % "provided",
"org.apache.flink" %% "flink-connector-hive" % flinkVersion % "provided",
"org.apache.hive" % "hive-exec" % hiveVersion,
"org.postgresql" % "postgresql" % "42.2.18",
"org.apache.flink" %% "flink-clients" % flinkVersion % "provided",
"org.apache.flink" %% "flink-scala" % flinkVersion % "provided",
"org.apache.flink" %% "flink-streaming-scala" % flinkVersion % "provided",
"org.apache.flink" %% "flink-connector-kafka" % flinkVersion % "provided",
--
Sent from: http://apache-flink.147419.n8.nabble.com/