Posted to user-zh@flink.apache.org by ghostviper <ch...@gmail.com> on 2020/11/16 09:02:48 UTC

Error when pyflink connects to hbase-1.4.x via SQL DDL: Configuring the input format (null) failed: Cannot create connection to HBase

*Environment:*
hbase-1.4.13
flink-1.11.1
python-3.6.1
pyflink-1.0

*Configuration already in place:*
1. The HBase lib path has already been added to the hadoop classpath (:/opt/hbase/hbase-1.4.13/lib/*)
2. The DDL in the program is as follows (a usage sketch follows this list):

source_ddl = """CREATE TABLE MySourceTable (
        hbase_rowkey_name varchar,
        cf1 ROW<word varchar>
    ) WITH (
        'connector.type' = 'hbase',
        'connector.version' = '1.4.3',
        'connector.table-name' = 'flink-test',
        'connector.zookeeper.quorum' = 'es-zk-hadoop1:2181,es-zk-hadoop2:2181,es-zk-hadoop3:2181',
        'connector.zookeeper.znode.parent' = '/hbase')
"""

sink_ddl = """CREATE TABLE MySinkTable (
        hbase_rowkey_name varchar,
        cf1 ROW<cnt bigint>
    ) WITH (
        'connector.type' = 'hbase',
        'connector.version' = '1.4.3',
        'connector.table-name' = 'flink-test-result',
        'connector.zookeeper.quorum' = 'es-zk-hadoop1:2181,es-zk-hadoop2:2181,es-zk-hadoop3:2181',
        'connector.zookeeper.znode.parent' = '/hbase')
"""
3. ZooKeeper has no authentication enabled
4. Hive can access HBase through mapped tables
5. HBase shell commands execute correctly
6. The HBase cluster status is normal
7. The jars under the hbase lib directory are as follows:
./hbase-common-1.4.3.jar
./flink-connector-hbase_2.11-1.11.1.jar
./hbase-client-1.4.3.jar
./hbase-protocol-1.4.3.jar
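
For completeness, a minimal sketch of how the two tables above are typically registered and used in a PyFlink 1.11 job. The word-count style query and the job name "7-read_and_hbase" are only inferred from the traceback below; the imports, planner settings, and the exact INSERT statement are assumptions, not the real read_hbase.py:

    from pyflink.datastream import StreamExecutionEnvironment
    from pyflink.table import StreamTableEnvironment, EnvironmentSettings

    env = StreamExecutionEnvironment.get_execution_environment()
    settings = EnvironmentSettings.new_instance().in_streaming_mode().use_blink_planner().build()
    st_env = StreamTableEnvironment.create(env, environment_settings=settings)

    st_env.execute_sql(source_ddl)  # register the HBase source table defined above
    st_env.execute_sql(sink_ddl)    # register the HBase sink table defined above

    # Count the words read from cf1.word and write (word, ROW(cnt)) rows back to HBase.
    st_env.sql_update(
        "INSERT INTO MySinkTable "
        "SELECT word, ROW(cnt) "
        "FROM (SELECT cf1.word AS word, COUNT(1) AS cnt FROM MySourceTable GROUP BY cf1.word)"
    )
    st_env.execute("7-read_and_hbase")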


*Error message:*
Traceback (most recent call last):
  File "read_hbase.py", line 46, in <module>
    st_env.execute("7-read_and_hbase")
  File "/home/chenxiaoyun/.pyenv/versions/3.6.1/lib/python3.6/site-packages/pyflink/table/table_environment.py", line 1057, in execute
    return JobExecutionResult(self._j_tenv.execute(job_name))
  File "/home/chenxiaoyun/.pyenv/versions/3.6.1/lib/python3.6/site-packages/py4j/java_gateway.py", line 1286, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/home/chenxiaoyun/.pyenv/versions/3.6.1/lib/python3.6/site-packages/pyflink/util/exceptions.py", line 147, in deco
    return f(*a, **kw)
  File "/home/chenxiaoyun/.pyenv/versions/3.6.1/lib/python3.6/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o7.execute.
: org.apache.flink.util.FlinkException: Failed to execute job '7-read_and_hbase'.
	at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1823)
	at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1713)
	at org.apache.flink.streaming.api.environment.LocalStreamEnvironment.execute(LocalStreamEnvironment.java:74)
	at org.apache.flink.table.planner.delegation.ExecutorBase.execute(ExecutorBase.java:52)
	at org.apache.flink.table.api.internal.TableEnvironmentImpl.execute(TableEnvironmentImpl.java:1214)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
	at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
	at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.flink.runtime.client.JobSubmissionException: Failed to submit job.
	at org.apache.flink.runtime.dispatcher.Dispatcher.lambda$internalSubmitJob$3(Dispatcher.java:344)
	at java.util.concurrent.CompletableFuture.uniHandle(CompletableFuture.java:822)
	at java.util.concurrent.CompletableFuture$UniHandle.tryFire(CompletableFuture.java:797)
	at java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:442)
	at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40)
	at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44)
	at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
	at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
	at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
	at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: org.apache.flink.runtime.client.JobExecutionException: Could not instantiate JobManager.
	at org.apache.flink.runtime.dispatcher.Dispatcher.lambda$createJobManagerRunner$6(Dispatcher.java:398)
	at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1590)
	... 6 more
Caused by: org.apache.flink.runtime.client.JobExecutionException: Cannot initialize task 'Source: HBaseTableSource[schema=[hbase_rowkey_name, cf1], projectFields=[1]] -> SourceConversion(table=[default_catalog.default_database.MySourceTable, source: [HBaseTableSource[schema=[hbase_rowkey_name, cf1], projectFields=[1]]]], fields=[cf1]) -> Calc(select=[cf1.word AS cf1$word, 1 AS $f1])': Configuring the input format (null) failed: Cannot create connection to HBase.
	at org.apache.flink.runtime.executiongraph.ExecutionGraphBuilder.buildGraph(ExecutionGraphBuilder.java:216)
	at org.apache.flink.runtime.scheduler.SchedulerBase.createExecutionGraph(SchedulerBase.java:269)
	at org.apache.flink.runtime.scheduler.SchedulerBase.createAndRestoreExecutionGraph(SchedulerBase.java:242)
	at org.apache.flink.runtime.scheduler.SchedulerBase.<init>(SchedulerBase.java:229)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.<init>(DefaultScheduler.java:119)
	at org.apache.flink.runtime.scheduler.DefaultSchedulerFactory.createInstance(DefaultSchedulerFactory.java:103)
	at org.apache.flink.runtime.jobmaster.JobMaster.createScheduler(JobMaster.java:284)
	at org.apache.flink.runtime.jobmaster.JobMaster.<init>(JobMaster.java:272)
	at org.apache.flink.runtime.jobmaster.factories.DefaultJobMasterServiceFactory.createJobMasterService(DefaultJobMasterServiceFactory.java:98)
	at org.apache.flink.runtime.jobmaster.factories.DefaultJobMasterServiceFactory.createJobMasterService(DefaultJobMasterServiceFactory.java:40)
	at org.apache.flink.runtime.jobmaster.JobManagerRunnerImpl.<init>(JobManagerRunnerImpl.java:140)
	at org.apache.flink.runtime.dispatcher.DefaultJobManagerRunnerFactory.createJobManagerRunner(DefaultJobManagerRunnerFactory.java:84)
	at org.apache.flink.runtime.dispatcher.Dispatcher.lambda$createJobManagerRunner$6(Dispatcher.java:388)
	... 7 more
Caused by: java.lang.Exception: Configuring the input format (null) failed: Cannot create connection to HBase.
	at org.apache.flink.runtime.jobgraph.InputOutputFormatVertex.initializeOnMaster(InputOutputFormatVertex.java:79)
	at org.apache.flink.runtime.executiongraph.ExecutionGraphBuilder.buildGraph(ExecutionGraphBuilder.java:212)
	... 19 more
Caused by: java.lang.RuntimeException: Cannot create connection to HBase.
	at org.apache.flink.connector.hbase.source.HBaseRowInputFormat.connectToTable(HBaseRowInputFormat.java:99)
	at org.apache.flink.connector.hbase.source.HBaseRowInputFormat.configure(HBaseRowInputFormat.java:69)
	at org.apache.flink.runtime.jobgraph.InputOutputFormatVertex.initializeOnMaster(InputOutputFormatVertex.java:76)
	... 20 more
Caused by: java.io.IOException: java.lang.reflect.InvocationTargetException
	at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:240)
	at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:218)
	at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:119)
	at org.apache.flink.connector.hbase.source.HBaseRowInputFormat.connectToTable(HBaseRowInputFormat.java:92)
	... 22 more
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:238)
	... 25 more
Caused by: java.lang.NoClassDefFoundError: io/netty/channel/EventLoopGroup
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:348)
	at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2306)
	at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2271)
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2367)
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2393)
	at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.<init>(ConnectionManager.java:714)
	... 30 more
Caused by: java.lang.ClassNotFoundException: io.netty.channel.EventLoopGroup
	at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	... 37 more




--
Sent from: http://apache-flink.147419.n8.nabble.com/

Re: Error when pyflink connects to hbase-1.4.x via SQL DDL: Configuring the input format (null) failed: Cannot create connection to HBase

Posted by Wei Zhong <we...@gmail.com>.
Hi,

Looking at the root cause, the class io.netty.channel.EventLoopGroup cannot be found. Could you check whether the classpath contains a netty jar, or whether netty has been shaded into one of the related jars?
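
In case it is useful, here is one way to scan the jars for that class and tell whether netty is missing entirely or only present as a relocated (shaded) copy. This is just a sketch; the directories are assumptions and should point at whatever is actually on your hadoop classpath and in Flink's lib directory:

    import glob
    import zipfile

    # Hypothetical locations; adjust to your own hadoop classpath / Flink lib layout.
    candidate_dirs = ["/opt/hbase/hbase-1.4.13/lib", "/opt/flink/lib"]
    target = "io/netty/channel/EventLoopGroup.class"

    for d in candidate_dirs:
        for jar in sorted(glob.glob(d + "/*.jar")):
            try:
                with zipfile.ZipFile(jar) as zf:
                    names = zf.namelist()
            except zipfile.BadZipFile:
                continue  # skip corrupt or non-jar files
            if target in names:
                print("plain netty found in:", jar)
            elif any(n.endswith("/EventLoopGroup.class") and "netty" in n for n in names):
                print("shaded (relocated) netty found in:", jar)

If netty turns out to be missing from the classpath the job actually runs with, adding a netty jar to it (for example via Flink's pipeline.jars / pipeline.classpaths options, or by placing the jar in Flink's lib directory) might resolve the NoClassDefFoundError, though I have not verified that against this exact setup.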
