Posted to user-zh@flink.apache.org by myfjdthink <jf...@kalengo.com> on 2020/09/17 06:50:07 UTC

python udf fails when submitted to a local node

Operating system

Mac OS

flink --version 

Version: 1.11.1, Commit ID: 7eb514a


Code

from pyflink.table import StreamTableEnvironment, EnvironmentSettings, DataTypes
from pyflink.table.udf import udf

# 1. create a TableEnvironment
env_settings = EnvironmentSettings.new_instance().in_streaming_mode().use_blink_planner().build()
table_env = StreamTableEnvironment.create(environment_settings=env_settings)

# 2. create source Table
table_env.execute_sql("""
    CREATE TABLE datagen (
        id BIGINT,
        data STRING
    ) WITH (
        'connector' = 'datagen',
        'fields.id.kind' = 'sequence',
        'fields.id.start' = '1',
        'fields.id.end' = '20'
    )
""")

# 3. create sink Table
table_env.execute_sql("""
    CREATE TABLE print (
        id BIGINT,
        data STRING
    ) WITH (
        'connector' = 'print'
    )
""")

@udf(input_types=[DataTypes.BIGINT(), DataTypes.BIGINT()],
     result_type=DataTypes.BIGINT(), udf_type="pandas")
def add(i, j):
    return i + j


table_env.get_config().get_configuration().set_string(
    "taskmanager.memory.task.off-heap.size", '80m')
table_env.register_function("add", add)
table_env.execute_sql("""INSERT INTO print
SELECT add(id, 1), data FROM datagen
""").get_job_client().get_job_execution_result().result()


Running the py file directly works fine.

Submitting it to flink with the following command produces an error:

flink run -py src/etl/hello_world.py

Error message


flink run -py src/etl/hello_world.py
WARNING: An illegal reflective access operation has occurred
WARNING: Illegal reflective access by
org.apache.flink.api.java.ClosureCleaner
(file:/Users/nick/flink-1.11.1/lib/flink-dist_2.12-1.11.1.jar) to field
java.lang.String.value
WARNING: Please consider reporting this to the maintainers of
org.apache.flink.api.java.ClosureCleaner
WARNING: Use --illegal-access=warn to enable warnings of further illegal
reflective access operations
WARNING: All illegal access operations will be denied in a future release
Job has been submitted with JobID f38ed31397d5bd6af813bd3048d49048
Traceback (most recent call last):
  File "src/etl/hello_world.py", line 41, in <module>
    """).get_job_client().get_job_execution_result().result()
  File
"/Users/nick/flink-1.11.1/opt/python/pyflink.zip/pyflink/common/completable_future.py",
line 78, in result
  File
"/Users/nick/flink-1.11.1/opt/python/py4j-0.10.8.1-src.zip/py4j/java_gateway.py",
line 1286, in __call__
  File
"/Users/nick/flink-1.11.1/opt/python/pyflink.zip/pyflink/util/exceptions.py",
line 147, in deco
  File
"/Users/nick/flink-1.11.1/opt/python/py4j-0.10.8.1-src.zip/py4j/protocol.py",
line 328, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o71.get.
: java.util.concurrent.ExecutionException:
org.apache.flink.client.program.ProgramInvocationException: Job failed
(JobID: f38ed31397d5bd6af813bd3048d49048)
        at
java.base/java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:395)
        at
java.base/java.util.concurrent.CompletableFuture.get(CompletableFuture.java:2063)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:564)
        at
org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
        at
org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at
org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
        at
org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
        at
org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
        at
org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
        at java.base/java.lang.Thread.run(Thread.java:832)
Caused by: org.apache.flink.client.program.ProgramInvocationException: Job
failed (JobID: f38ed31397d5bd6af813bd3048d49048)
        at
org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:116)
        at
java.base/java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:642)
        at
java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
        at
java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2137)
        at
org.apache.flink.client.program.rest.RestClusterClient.lambda$pollResourceAsync$22(RestClusterClient.java:602)
        at
java.base/java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:859)
        at
java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:837)
        at
java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
        at
java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2137)
        at
org.apache.flink.runtime.concurrent.FutureUtils.lambda$retryOperationWithDelay$8(FutureUtils.java:309)
        at
java.base/java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:859)
        at
java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:837)
        at
java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
        at
java.base/java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:610)
        at
java.base/java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:1159)
        at
java.base/java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:478)
        at
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:630)
        ... 1 more
Caused by: org.apache.flink.runtime.client.JobExecutionException: Job
execution failed.
        at
org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
        at
org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:114)
        ... 18 more
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by
NoRestartBackoffTimeStrategy
        at
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)
        at
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)
        at
org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
        at
org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:185)
        at
org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:179)
        at
org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:503)
        at
org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:386)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:564)
        at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)
        at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)
        at
org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
        at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
        at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
        at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
        at
akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
        at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
        at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
        at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
        at akka.actor.Actor.aroundReceive(Actor.scala:517)
        at akka.actor.Actor.aroundReceive$(Actor.scala:515)
        at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
        at akka.actor.ActorCell.invoke(ActorCell.scala:561)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
        at akka.dispatch.Mailbox.run(Mailbox.scala:225)
        at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
        at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at
akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at
akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at
akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: java.lang.RuntimeException: Failed to create stage bundle
factory!
        at
org.apache.flink.python.AbstractPythonFunctionRunner.createStageBundleFactory(AbstractPythonFunctionRunner.java:197)
        at
org.apache.flink.python.AbstractPythonFunctionRunner.open(AbstractPythonFunctionRunner.java:164)
        at
org.apache.flink.table.runtime.runners.python.scalar.AbstractGeneralPythonScalarFunctionRunner.open(AbstractGeneralPythonScalarFunctionRunner.java:65)
        at
org.apache.flink.table.runtime.operators.python.AbstractStatelessFunctionOperator$ProjectUdfInputPythonScalarFunctionRunner.open(AbstractStatelessFunctionOperator.java:186)
        at
org.apache.flink.streaming.api.operators.python.AbstractPythonFunctionOperator.open(AbstractPythonFunctionOperator.java:143)
        at
org.apache.flink.table.runtime.operators.python.AbstractStatelessFunctionOperator.open(AbstractStatelessFunctionOperator.java:131)
        at
org.apache.flink.table.runtime.operators.python.scalar.AbstractPythonScalarFunctionOperator.open(AbstractPythonScalarFunctionOperator.java:88)
        at
org.apache.flink.table.runtime.operators.python.scalar.AbstractRowDataPythonScalarFunctionOperator.open(AbstractRowDataPythonScalarFunctionOperator.java:80)
        at
org.apache.flink.table.runtime.operators.python.scalar.RowDataPythonScalarFunctionOperator.open(RowDataPythonScalarFunctionOperator.java:64)
        at
org.apache.flink.streaming.runtime.tasks.OperatorChain.initializeStateAndOpenOperators(OperatorChain.java:291)
        at
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$beforeInvoke$0(StreamTask.java:473)
        at
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:92)
        at
org.apache.flink.streaming.runtime.tasks.StreamTask.beforeInvoke(StreamTask.java:469)
        at
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:522)
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:721)
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:546)
        at java.base/java.lang.Thread.run(Thread.java:832)
Caused by: java.io.IOException: Failed to execute the command: python -c import pyflink;import os;print(os.path.join(os.path.abspath(os.path.dirname(pyflink.__file__)), 'bin'))
output: Traceback (most recent call last):
  File "<string>", line 1, in <module>
ImportError: No module named pyflink

        at
org.apache.flink.python.util.PythonEnvironmentManagerUtils.execute(PythonEnvironmentManagerUtils.java:198)
        at
org.apache.flink.python.util.PythonEnvironmentManagerUtils.getPythonUdfRunnerScript(PythonEnvironmentManagerUtils.java:141)
        at
org.apache.flink.python.env.ProcessPythonEnvironmentManager.createEnvironment(ProcessPythonEnvironmentManager.java:179)
        at
org.apache.flink.python.AbstractPythonFunctionRunner.createPythonExecutionEnvironment(AbstractPythonFunctionRunner.java:249)
        at
org.apache.flink.table.runtime.runners.python.AbstractPythonStatelessFunctionRunner.createExecutableStage(AbstractPythonStatelessFunctionRunner.java:158)
        at
org.apache.flink.python.AbstractPythonFunctionRunner.createStageBundleFactory(AbstractPythonFunctionRunner.java:195)
        ... 16 more

org.apache.flink.client.program.ProgramAbortException
        at
org.apache.flink.client.python.PythonDriver.main(PythonDriver.java:95)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)
        at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:564)
        at
org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:288)
        at
org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:198)
        at
org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:149)
        at
org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:699)
        at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:232)
        at
org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:916)
        at
org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:992)
        at
org.apache.flink.runtime.security.contexts.NoOpSecurityContext.runSecured(NoOpSecurityContext.java:30)
        at
org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:992)







Re: python udf fails when submitted to a local node

Posted by Xingbo Huang <hx...@gmail.com>.
Hi,

From the error, the `python` interpreter used by your client environment does not have pyflink installed. `-pyexec` specifies the Python environment used by the workers that run your UDFs, but a Python environment is also needed on the client side when the job is compiled, and that Python environment must have pyflink installed as well.
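
A quick way to check which installation the client side picks up is to run something like the following with the client-side interpreter; a minimal sketch that only verifies the interpreter can import pyflink and shows where it is installed:

# Run this with the interpreter the flink CLI uses to compile the job.
# It prints which pyflink installation, if any, gets picked up.
import pyflink
print(pyflink.__file__)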

Best,
Xingbo


陈康 <84...@qq.com> wrote on Mon, Jan 25, 2021 at 9:01 PM:

> Hi, a question about configuring pyflink: running it locally fails with an error.
> [root@hadoop01 ~]# pip list | grep flink
> apache-flink (1.12.0)
>
> [root@hadoop01 ~]# python3 -V
> Python 3.6.5
>
> flink run -m localhost:8081 -py datastream_tutorial.py -pyexec /usr/local/python3/bin/python3
>
>  File "datastream_tutorial.py", line 1, in <module>
>     from pyflink.common.serialization import SimpleStringEncoder
> ModuleNotFoundError: No module named 'pyflink.common.serialization'
> May I ask how you configured the environment variables? Thanks.
>
>
>
>

Re: python udf fails when submitted to a local node

Posted by 陈康 <84...@qq.com>.
Hi, a question about configuring pyflink: running it locally fails with an error.
[root@hadoop01 ~]# pip list | grep flink
apache-flink (1.12.0)

[root@hadoop01 ~]# python3 -V
Python 3.6.5

flink run -m localhost:8081 -py datastream_tutorial.py -pyexec /usr/local/python3/bin/python3

 File "datastream_tutorial.py", line 1, in <module>
    from pyflink.common.serialization import SimpleStringEncoder
ModuleNotFoundError: No module named 'pyflink.common.serialization'
May I ask how you configured the environment variables? Thanks.




Re: python udf fails when submitted to a local node

Posted by myfjdthink <jf...@kalengo.com>.
Thanks, that fixed it. Following the docs you provided, I changed the command to

flink run -py src/etl/hello_world.py -pyexec /usr/local/opt/python@3.7/bin/python3

Specifying the Python executable was all it took.
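
For reference, the worker interpreter can also be pinned inside the job itself; a minimal sketch, assuming the python.executable option that -pyexec corresponds to, and reusing the interpreter path from the command above:

# equivalent of passing -pyexec on the command line: pin the interpreter that
# runs the Python UDF workers (adjust the path to your own environment)
table_env.get_config().get_configuration().set_string(
    "python.executable", "/usr/local/opt/python@3.7/bin/python3")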




Re: python udf fails when submitted to a local node

Posted by Xingbo Huang <hx...@gmail.com>.
Hi,

You can refer to the docs [1]; the set_python_executable(python_exec) API there is used to set your Python environment, and you need to make sure that Python environment has pyflink installed.
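
For example, a minimal sketch of that call on the TableEnvironment, assuming the TableConfig API described in [1] and using an illustrative interpreter path:

# point the Python UDF workers at an interpreter that has pyflink installed
# (the path here is only an example; use your own environment)
table_env.get_config().set_python_executable("/usr/local/opt/python@3.7/bin/python3")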

[1]
https://ci.apache.org/projects/flink/flink-docs-release-1.11/dev/python/table-api-users-guide/dependency_management.html#python-dependency

Best,
Xingbo

myfjdthink <jf...@kalengo.com> wrote on Thu, Sep 17, 2020 at 3:13 PM:

> Hi, my local cluster is a single node; I started it directly with the
> bin/start-cluster.sh
> command from the docs.
>
> I went through the docs but couldn't find anything about how to configure pyflink on the cluster. Could you point me to the relevant documentation?
>
>
>

Re: python udf fails when submitted to a local node

Posted by myfjdthink <jf...@kalengo.com>.
Hi, my local cluster is a single node; I started it directly with the
bin/start-cluster.sh
command from the docs.

I went through the docs but couldn't find anything about how to configure pyflink on the cluster. Could you point me to the relevant documentation?




Re: python udf fails when submitted to a local node

Posted by Xingbo Huang <hx...@gmail.com>.
Hi,

You can see this line in the error message:
ImportError: No module named pyflink

It looks like the Python environment used by your cluster does not have pyflink installed.
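
You can reproduce that probe by hand; a minimal sketch, meant to be run with the Python interpreter the TaskManagers use (it mirrors the command shown in the stack trace):

# import pyflink and print where its bin/ directory (the UDF runner scripts) lives;
# if this raises ImportError, install apache-flink into this interpreter
import os
import pyflink

print(os.path.join(os.path.abspath(os.path.dirname(pyflink.__file__)), 'bin'))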

Best,
Xingbo

myfjdthink <jf...@kalengo.com> wrote on Thu, Sep 17, 2020 at 2:50 PM:

> Caused by: java.io.IOException: Failed to execute the command: python -c import pyflink;import os;print(os.path.join(os.path.abspath(os.path.dirname(pyflink.__file__)), 'bin'))
> output: Traceback (most recent call last):
>   File "<string>", line 1, in <module>
> ImportError: No module named pyflink