You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@flink.apache.org by "zhangsan (Jira)" <ji...@apache.org> on 2022/05/13 10:49:00 UTC
[jira] [Updated] (FLINK-27604) flink sql read hive on hbase throw NPE
[ https://issues.apache.org/jira/browse/FLINK-27604?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
zhangsan updated FLINK-27604:
-----------------------------
Description:
I have some table data stored in HBase. I usually read that HBase data through Hive by defining external tables, and I want to read it with Flink SQL by querying those Hive tables. When I try this with sql-client I get an error. I don't know if there is any way to solve this problem, but I can read the data using the Spark engine.
----
Environment:
flink:1.13.6
hive:2.1.1-cdh6.2.0
hbase:2.1.0-cdh6.2.0
flinksql Execution tools:flink sql client
sql submit mode:yarn-per-job
----
flink lib directory
antlr-runtime-3.5.2.jar
flink-csv-1.13.6.jar
flink-dist_2.11-1.13.6.jar
flink-json-1.13.6.jar
flink-shaded-zookeeper-3.4.14.jar
flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar
flink-table_2.11-1.13.6.jar
flink-table-blink_2.11-1.13.6.jar
guava-14.0.1.jar
hadoop-mapreduce-client-core-3.0.0-cdh6.2.0.jar
hbase-client-2.1.0-cdh6.2.0.jar
hbase-common-2.1.0-cdh6.2.0.jar
hbase-protocol-2.1.0-cdh6.2.0.jar
hbase-server-2.1.0-cdh6.2.0.jar
hive-exec-2.1.1-cdh6.2.0.jar
hive-hbase-handler-2.1.1-cdh6.2.0.jar
htrace-core4-4.1.0-incubating.jar
log4j-1.2-api-2.17.1.jar
log4j-api-2.17.1.jar
log4j-core-2.17.1.jar
log4j-slf4j-impl-2.17.1.jar
protobuf-java-2.5.0.jar
----
step:
hive create table statement:
{code:java}
CREATE EXTERNAL TABLE `ods`.`student`(
`row_key` string,
`name` string,
`age` int,
`addr` string
)
ROW FORMAT SERDE
'org.apache.hadoop.hive.hbase.HBaseSerDe'
STORED BY
'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
'hbase.columns.mapping'=':key,FINAL:NAME,FINAL:AGE,FINAL:ADDR', 'serialization.format'='1')
TBLPROPERTIES (
'hbase.table.name'='ODS:STUDENT'); {code}
catalog:hive catalog
sql: select * from ods.student;
----
error:
{code:java}
org.apache.flink.table.client.gateway.SqlExecutionException: Could not execute SQL statement. at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:215) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeQuery(LocalExecutor.java:235) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callSelect(CliClient.java:479) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callOperation(CliClient.java:412) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.lambda$executeStatement$0(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at java.util.Optional.ifPresent(Optional.java:159) ~[?:1.8.0_191] at org.apache.flink.table.client.cli.CliClient.executeStatement(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:297) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:221) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client_2.11-1.13.6.jar:1.13.6]Caused by: org.apache.flink.connectors.hive.FlinkHiveException: Unable to instantiate the hadoop input format at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:100) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) 
~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 moreCaused by: java.lang.NullPointerException at java.lang.Class.forName0(Native Method) ~[?:1.8.0_191] at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_191] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:94) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) 
~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 more{code}
was:
I have some table data stored in HBase. I usually read that HBase data through Hive by defining external tables, and I want to read it with Flink SQL by querying those Hive tables. When I try this with sql-client I get an error. I don't know if there is any way to solve this problem, but I can read the data using the Spark engine.
-----------------------------------------------------------------------------------------------------
Environment:
flink:1.13.6
hive:2.1.1-cdh6.2.0
hbase:2.1.0-cdh6.2.0
flinksql Execution tools:flink sql client
sql submit mode:yarn-per-job
------------------------------------------------------------------------------------------------------
flink lib directory
antlr-runtime-3.5.2.jar
flink-csv-1.13.6.jar
flink-dist_2.11-1.13.6.jar
flink-json-1.13.6.jar
flink-shaded-zookeeper-3.4.14.jar
flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar
flink-table_2.11-1.13.6.jar
flink-table-blink_2.11-1.13.6.jar
guava-14.0.1.jar
hadoop-mapreduce-client-core-3.0.0-cdh6.2.0.jar
hbase-client-2.1.0-cdh6.2.0.jar
hbase-common-2.1.0-cdh6.2.0.jar
hbase-protocol-2.1.0-cdh6.2.0.jar
hbase-server-2.1.0-cdh6.2.0.jar
hive-exec-2.1.1-cdh6.2.0.jar
hive-hbase-handler-2.1.1-cdh6.2.0.jar
htrace-core4-4.1.0-incubating.jar
log4j-1.2-api-2.17.1.jar
log4j-api-2.17.1.jar
log4j-core-2.17.1.jar
log4j-slf4j-impl-2.17.1.jar
protobuf-java-2.5.0.jar
---------------------------------------------------------------------------------------------
step:
hive create table statement:
{code:java}
CREATE EXTERNAL TABLE `ods`.`student`(
`row_key` string,
`name` string,
`age` int,
`addr` string
)
ROW FORMAT SERDE
'org.apache.hadoop.hive.hbase.HBaseSerDe'
STORED BY
'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
'hbase.columns.mapping'=':key,FINAL:NAME,FINAL:AGE,FINAL:ADDR', 'serialization.format'='1')
TBLPROPERTIES (
'hbase.table.name'='ODS:STUDENT'); {code}
catalog:hive catalog
sql: select * from ods.student;
---------------------------------------------------
error:
{code:java}
org.apache.flink.table.client.gateway.SqlExecutionException: Could not execute SQL statement. at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:215) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeQuery(LocalExecutor.java:235) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callSelect(CliClient.java:479) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callOperation(CliClient.java:412) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.lambda$executeStatement$0(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at java.util.Optional.ifPresent(Optional.java:159) ~[?:1.8.0_191] at org.apache.flink.table.client.cli.CliClient.executeStatement(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:297) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:221) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client_2.11-1.13.6.jar:1.13.6]Caused by: org.apache.flink.connectors.hive.FlinkHiveException: Unable to instantiate the hadoop input format at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:100) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) 
~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 moreCaused by: java.lang.NullPointerException at java.lang.Class.forName0(Native Method) ~[?:1.8.0_191] at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_191] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:94) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) 
~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 more {code}
> flink sql read hive on hbase throw NPE
> --------------------------------------
>
> Key: FLINK-27604
> URL: https://issues.apache.org/jira/browse/FLINK-27604
> Project: Flink
> Issue Type: Bug
> Components: Connectors / Hive
> Affects Versions: 1.13.6
> Reporter: zhangsan
> Priority: Major
>
> I have some table data stored in HBase. I usually read that HBase data through Hive by defining external tables, and I want to read it with Flink SQL by querying those Hive tables. When I try this with sql-client I get an error. I don't know if there is any way to solve this problem, but I can read the data using the Spark engine.
> ----
> Environment:
> flink:1.13.6
> hive:2.1.1-cdh6.2.0
> hbase:2.1.0-cdh6.2.0
> flinksql Execution tools:flink sql client
> sql submit mode:yarn-per-job
> ----
> flink lib directory
> antlr-runtime-3.5.2.jar
> flink-csv-1.13.6.jar
> flink-dist_2.11-1.13.6.jar
> flink-json-1.13.6.jar
> flink-shaded-zookeeper-3.4.14.jar
> flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar
> flink-table_2.11-1.13.6.jar
> flink-table-blink_2.11-1.13.6.jar
> guava-14.0.1.jar
> hadoop-mapreduce-client-core-3.0.0-cdh6.2.0.jar
> hbase-client-2.1.0-cdh6.2.0.jar
> hbase-common-2.1.0-cdh6.2.0.jar
> hbase-protocol-2.1.0-cdh6.2.0.jar
> hbase-server-2.1.0-cdh6.2.0.jar
> hive-exec-2.1.1-cdh6.2.0.jar
> hive-hbase-handler-2.1.1-cdh6.2.0.jar
> htrace-core4-4.1.0-incubating.jar
> log4j-1.2-api-2.17.1.jar
> log4j-api-2.17.1.jar
> log4j-core-2.17.1.jar
> log4j-slf4j-impl-2.17.1.jar
> protobuf-java-2.5.0.jar
> ----
> step:
> hive create table statement:
> {code:java}
> CREATE EXTERNAL TABLE `ods`.`student`(
> `row_key` string,
> `name` string,
> `age` int,
> `addr` string
> )
> ROW FORMAT SERDE
> 'org.apache.hadoop.hive.hbase.HBaseSerDe'
> STORED BY
> 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
> WITH SERDEPROPERTIES (
> 'hbase.columns.mapping'=':key,FINAL:NAME,FINAL:AGE,FINAL:ADDR', 'serialization.format'='1')
> TBLPROPERTIES (
> 'hbase.table.name'='ODS:STUDENT'); {code}
> catalog:hive catalog
> sql: select * from ods.student;
> ----
> error:
> {code:java}
> org.apache.flink.table.client.gateway.SqlExecutionException: Could not execute SQL statement. at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:215) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeQuery(LocalExecutor.java:235) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callSelect(CliClient.java:479) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.callOperation(CliClient.java:412) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.lambda$executeStatement$0(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at java.util.Optional.ifPresent(Optional.java:159) ~[?:1.8.0_191] at org.apache.flink.table.client.cli.CliClient.executeStatement(CliClient.java:327) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:297) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:221) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client_2.11-1.13.6.jar:1.13.6]Caused by: org.apache.flink.connectors.hive.FlinkHiveException: Unable to instantiate the hadoop input format at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:100) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) 
~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at 
org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 more
Caused by: java.lang.NullPointerException at java.lang.Class.forName0(Native Method) ~[?:1.8.0_191] at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_191] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createMRSplits(HiveSourceFileEnumerator.java:94) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveSourceFileEnumerator.createInputSplits(HiveSourceFileEnumerator.java:71) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.lambda$getDataStream$1(HiveTableSource.java:212) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.logRunningTime(HiveParallelismInference.java:107) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveParallelismInference.infer(HiveParallelismInference.java:95) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:207) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:123) ~[flink-sql-connector-hive-2.2.0_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecTableSourceScan.translateToPlanInternal(CommonExecTableSourceScan.java:96) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecEdge.translateToPlan(ExecEdge.java:247) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at
org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:114) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:134) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:70) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$1.apply(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.Iterator$class.foreach(Iterator.scala:891) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-dist_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:69) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:165) ~[flink-table-blink_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1518) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:791) 
~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1225) ~[flink-table_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:90) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] at org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:213) ~[flink-sql-client_2.11-1.13.6.jar:1.13.6] ... 12 more{code}
--
This message was sent by Atlassian Jira
(v8.20.7#820007)