You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user-zh@flink.apache.org by liangjinghong <li...@huawei.com.INVALID> on 2022/02/21 03:41:46 UTC

答复: flink创建视图后,SELECT语句后使用OPTIONS报错

感谢老师的回复,然而,移除该包后,部署运行直接报错:
2022-02-19 12:50:23,231 ERROR org.apache.flink.runtime.webmonitor.handlers.JarRunHandler   [] - Exception occurred in REST handler: Could not execute application.
2022-02-19 12:50:49,375 WARN  org.apache.flink.runtime.webmonitor.handlers.JarRunHandler   [] - Configuring the job submission via query parameters is deprecated. Please migrate to submitting a JSON request instead.
2022-02-19 12:50:49,383 INFO  org.apache.flink.client.ClientUtils                          [] - Starting program (detached: true)
2022-02-19 12:50:49,383 INFO  org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend [] - Using predefined options: DEFAULT.
2022-02-19 12:50:49,383 INFO  org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend [] - Using default options factory: DefaultConfigurableOptionsFactory{configuredOptions={}}.
2022-02-19 12:50:49,383 INFO  com.test.ElasticScheduleTest                                 [] - StreamEnvironment established successfully.
2022-02-19 12:50:49,384 ERROR org.apache.flink.table.factories.TableFactoryService         [] - Could not load service provider for table factories.
java.util.ServiceConfigurationError: org.apache.flink.table.factories.TableFactory: Error accessing configuration file
	at java.util.ServiceLoader.fail(ServiceLoader.java:582) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.parse(ServiceLoader.java:1173) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.nextProviderClass(ServiceLoader.java:1206) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNextService(ServiceLoader.java:1221) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNext(ServiceLoader.java:1265) ~[?:?]
	at java.util.ServiceLoader$2.hasNext(ServiceLoader.java:1300) ~[?:?]
	at java.util.ServiceLoader$3.hasNext(ServiceLoader.java:1385) ~[?:?]
	at java.util.Iterator.forEachRemaining(Iterator.java:132) ~[?:?]
	at org.apache.flink.table.factories.TableFactoryService.discoverFactories(TableFactoryService.java:193) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAllInternal(TableFactoryService.java:163) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAll(TableFactoryService.java:121) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.ComponentFactoryService.find(ComponentFactoryService.java:50) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.lookupExecutor(StreamTableEnvironmentImpl.java:185) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.create(StreamTableEnvironmentImpl.java:156) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.StreamTableEnvironment.create(StreamTableEnvironment.java:128) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at com.test.ElasticScheduleTest.main(ElasticScheduleTest.java:39) ~[56a5eaa6-83b6-4b28-b69c-2c6b510d8347_flinkSQL-1.0-SNAPSHOT.jar:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.deployment.application.DetachedApplicationRunner.tryExecuteJobs(DetachedApplicationRunner.java:84) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.deployment.application.DetachedApplicationRunner.run(DetachedApplicationRunner.java:70) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.runtime.webmonitor.handlers.JarRunHandler.lambda$handleRequest$0(JarRunHandler.java:102) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1700) [?:?]
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [?:?]
	at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?]
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304) [?:?]
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) [?:?]
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) [?:?]
	at java.lang.Thread.run(Thread.java:829) [?:?]
Caused by: java.nio.file.NoSuchFileException: /root/bigdata/flink/lib/flink-table_2.11-1.13.0.jar
	at sun.nio.fs.UnixException.translateToIOException(UnixException.java:92) ~[?:?]
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:111) ~[?:?]
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:116) ~[?:?]
	at sun.nio.fs.UnixFileAttributeViews$Basic.readAttributes(UnixFileAttributeViews.java:55) ~[?:?]
	at sun.nio.fs.UnixFileSystemProvider.readAttributes(UnixFileSystemProvider.java:149) ~[?:?]
	at sun.nio.fs.LinuxFileSystemProvider.readAttributes(LinuxFileSystemProvider.java:99) ~[?:?]
	at java.nio.file.Files.readAttributes(Files.java:1764) ~[?:?]
	at java.util.zip.ZipFile$Source.get(ZipFile.java:1259) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource.<init>(ZipFile.java:831) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource$FinalizableResource.<init>(ZipFile.java:857) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource.get(ZipFile.java:846) ~[?:?]
	at java.util.zip.ZipFile.<init>(ZipFile.java:248) ~[?:?]
	at java.util.zip.ZipFile.<init>(ZipFile.java:177) ~[?:?]
	at java.util.jar.JarFile.<init>(JarFile.java:350) ~[?:?]
	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:103) ~[?:?]
	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:72) ~[?:?]
	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:99) ~[?:?]
	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:125) ~[?:?]
	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:155) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.parse(ServiceLoader.java:1165) ~[?:?]
	... 31 more
2022-02-19 12:50:49,384 WARN  org.apache.flink.client.deployment.application.DetachedApplicationRunner [] - Could not execute application: 
org.apache.flink.client.program.ProgramInvocationException: The main method caused an error: Could not instantiate the executor. Make sure a planner module is on the classpath
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:372) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.deployment.application.DetachedApplicationRunner.tryExecuteJobs(DetachedApplicationRunner.java:84) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.client.deployment.application.DetachedApplicationRunner.run(DetachedApplicationRunner.java:70) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.runtime.webmonitor.handlers.JarRunHandler.lambda$handleRequest$0(JarRunHandler.java:102) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1700) [?:?]
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [?:?]
	at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?]
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304) [?:?]
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) [?:?]
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) [?:?]
	at java.lang.Thread.run(Thread.java:829) [?:?]
Caused by: org.apache.flink.table.api.TableException: Could not instantiate the executor. Make sure a planner module is on the classpath
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.lookupExecutor(StreamTableEnvironmentImpl.java:194) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.create(StreamTableEnvironmentImpl.java:156) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.StreamTableEnvironment.create(StreamTableEnvironment.java:128) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at com.test.ElasticScheduleTest.main(ElasticScheduleTest.java:39) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	... 12 more
Caused by: org.apache.flink.table.api.TableException: Could not load service provider for table factories.
	at org.apache.flink.table.factories.TableFactoryService.discoverFactories(TableFactoryService.java:197) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAllInternal(TableFactoryService.java:163) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAll(TableFactoryService.java:121) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.ComponentFactoryService.find(ComponentFactoryService.java:50) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.lookupExecutor(StreamTableEnvironmentImpl.java:185) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.create(StreamTableEnvironmentImpl.java:156) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.StreamTableEnvironment.create(StreamTableEnvironment.java:128) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at com.test.ElasticScheduleTest.main(ElasticScheduleTest.java:39) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	... 12 more
Caused by: java.util.ServiceConfigurationError: org.apache.flink.table.factories.TableFactory: Error accessing configuration file
	at java.util.ServiceLoader.fail(ServiceLoader.java:582) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.parse(ServiceLoader.java:1173) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.nextProviderClass(ServiceLoader.java:1206) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNextService(ServiceLoader.java:1221) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNext(ServiceLoader.java:1265) ~[?:?]
	at java.util.ServiceLoader$2.hasNext(ServiceLoader.java:1300) ~[?:?]
	at java.util.ServiceLoader$3.hasNext(ServiceLoader.java:1385) ~[?:?]
	at java.util.Iterator.forEachRemaining(Iterator.java:132) ~[?:?]
	at org.apache.flink.table.factories.TableFactoryService.discoverFactories(TableFactoryService.java:193) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAllInternal(TableFactoryService.java:163) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAll(TableFactoryService.java:121) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.ComponentFactoryService.find(ComponentFactoryService.java:50) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.lookupExecutor(StreamTableEnvironmentImpl.java:185) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.create(StreamTableEnvironmentImpl.java:156) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.StreamTableEnvironment.create(StreamTableEnvironment.java:128) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at com.test.ElasticScheduleTest.main(ElasticScheduleTest.java:39) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	... 12 more
Caused by: java.nio.file.NoSuchFileException: /root/bigdata/flink/lib/flink-table_2.11-1.13.0.jar
	at sun.nio.fs.UnixException.translateToIOException(UnixException.java:92) ~[?:?]
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:111) ~[?:?]
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:116) ~[?:?]
	at sun.nio.fs.UnixFileAttributeViews$Basic.readAttributes(UnixFileAttributeViews.java:55) ~[?:?]
	at sun.nio.fs.UnixFileSystemProvider.readAttributes(UnixFileSystemProvider.java:149) ~[?:?]
	at sun.nio.fs.LinuxFileSystemProvider.readAttributes(LinuxFileSystemProvider.java:99) ~[?:?]
	at java.nio.file.Files.readAttributes(Files.java:1764) ~[?:?]
	at java.util.zip.ZipFile$Source.get(ZipFile.java:1259) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource.<init>(ZipFile.java:831) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource$FinalizableResource.<init>(ZipFile.java:857) ~[?:?]
	at java.util.zip.ZipFile$CleanableResource.get(ZipFile.java:846) ~[?:?]
	at java.util.zip.ZipFile.<init>(ZipFile.java:248) ~[?:?]
	at java.util.zip.ZipFile.<init>(ZipFile.java:177) ~[?:?]
	at java.util.jar.JarFile.<init>(JarFile.java:350) ~[?:?]
	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:103) ~[?:?]
	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:72) ~[?:?]
	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:99) ~[?:?]
	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:125) ~[?:?]
	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:155) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.parse(ServiceLoader.java:1165) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.nextProviderClass(ServiceLoader.java:1206) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNextService(ServiceLoader.java:1221) ~[?:?]
	at java.util.ServiceLoader$LazyClassPathLookupIterator.hasNext(ServiceLoader.java:1265) ~[?:?]
	at java.util.ServiceLoader$2.hasNext(ServiceLoader.java:1300) ~[?:?]
	at java.util.ServiceLoader$3.hasNext(ServiceLoader.java:1385) ~[?:?]
	at java.util.Iterator.forEachRemaining(Iterator.java:132) ~[?:?]
	at org.apache.flink.table.factories.TableFactoryService.discoverFactories(TableFactoryService.java:193) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAllInternal(TableFactoryService.java:163) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.TableFactoryService.findAll(TableFactoryService.java:121) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.factories.ComponentFactoryService.find(ComponentFactoryService.java:50) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.lookupExecutor(StreamTableEnvironmentImpl.java:185) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.create(StreamTableEnvironmentImpl.java:156) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at org.apache.flink.table.api.bridge.java.StreamTableEnvironment.create(StreamTableEnvironment.java:128) ~[flink-table_2.11-1.13.0.jar:1.13.0]
	at com.test.ElasticScheduleTest.main(ElasticScheduleTest.java:39) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
	at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:?]
	at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
	at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.0.jar:1.13.0]
	... 12 more

无论是在maven打包的时候把flink-table-planner-blink_2.11 打进去还是直接在lib目录下放flink-table-planner-blink_2.11包,都报一样的错,Caused by: java.nio.file.NoSuchFileException: /root/bigdata/flink/lib/flink-table_2.11-1.13.0.jar。
flink-table_2.11-1.13.0.jar这个包是flink1.13 tar包下lib本来就有的包,直接移除似乎不行,请问我该如何解决呢?
-----邮件原件-----
发件人: godfrey he [mailto:godfreyhe@gmail.com] 
发送时间: 2022年2月18日 14:15
收件人: user-zh <us...@flink.apache.org>
主题: Re: flink创建视图后,SELECT语句后使用OPTIONS报错

这个包:flink-table_2.11-1.13.0.jar (legacy planner 的包)

Best,
Godfrey

liangjinghong <li...@huawei.com.invalid> 于2022年2月17日周四 09:59写道:
>
> 感谢老师的回复,然而我的部署环境下的lib中没有您说的这个包,请问是要移除哪个包呢?
>
> 我的lib下有的包:
> flink-csv-1.13.0.jar
> flink-dist_2.11-1.13.0.jar
> flink-json-1.13.0.jar
> flink-shaded-zookeeper-3.4.14.jar
> flink-sql-connector-mysql-cdc-2.1.1.jar
> flink-table_2.11-1.13.0.jar
> flink-table-blink_2.11-1.13.0.jar
> log4j-1.2-api-2.12.1.jar
> log4j-api-2.12.1.jar
> log4j-core-2.12.1.jar
> log4j-slf4j-impl-2.12.1.jar
> -----邮件原件-----
> 发件人: godfrey he [mailto:godfreyhe@gmail.com]
> 发送时间: 2022年2月16日 16:47
> 收件人: user-zh <us...@flink.apache.org>
> 主题: Re: flink创建视图后,SELECT语句后使用OPTIONS报错
>
> Hi liangjinghong,
>
> 原因是 blink planner 中引入并修改了 SqlTableRef 类, 而 Legacy planner 中没有引入 
> SqlTableRef
> 类,从而导致加载到了Calcite 中 SqlTableRef (该类有问题)。
> 解决方案:如果只使用到了blink planner,可以把legacy planner 的包从lib下移除。
>
> Best,
> Godfrey
>
> liangjinghong <li...@huawei.com.invalid> 于2022年2月14日周一 
> 17:26写道:
>
> > 各位老师们好,以下代码在开发环境中可以执行,打包部署后报错:
> >
> > 代码:
> >
> > CREATE VIEW used_num_common
> >
> > (toolName,region,type,flavor,used_num)
> >
> > AS
> >
> > select info.toolName as toolName,r.regionName as
> > region,f.type,f.flavor,count(1) as used_num from
> >
> > tbl_schedule_job/*+ OPTIONS('server-id'='1001-1031') */ job
> >
> > join
> >
> > tbl_schedule_task/*+ OPTIONS('server-id'='2001-2031') */ task
> >
> > on job.jobId = task.jobId
> >
> > join
> >
> > tbl_broker_node/*+ OPTIONS('server-id'='3001-3031') */  node
> >
> > on task.nodeId = node.id
> >
> > join
> >
> > tbl_app_info/*+ OPTIONS('server-id'='4001-4031') */ info
> >
> > on job.appId = info.appId
> >
> > join
> >
> > tbl_region r
> >
> > on node.region/*+ OPTIONS('server-id'='5001-5031') */ = r.region
> >
> > join
> >
> > tbl_flavor/*+ OPTIONS('server-id'='6001-6031') */  f
> >
> > on node.resourcesSpec = f.flavor
> >
> > where job.jobStatus in ('RUNNING','ERROR','INITING')
> >
> > and task.taskStatus in ('RUNNING','ERROR','INITING')
> >
> > and node.machineStatus <> 'DELETED'
> >
> > and toolName is not null
> >
> > group by info.toolName,r.regionName,f.type,f.flavor
> >
> > …
> >
> > 打包部署后报错如下:
> >
> > The main method caused an error: class org.apache.calcite.sql.SqlSyntax$6:
> > SPECIAL
> >
> > 2022-02-08 13:33:39,350 WARN
> > org.apache.flink.client.deployment.application.DetachedApplicationRu
> > nn
> > er []
> > - Could not execute application:
> >
> > org.apache.flink.client.program.ProgramInvocationException: The main 
> > method caused an error: class org.apache.calcite.sql.SqlSyntax$6:
> > SPECIAL
> >
> >     at
> > org.apache.flink.client.program.PackagedProgram.callMainMethod(Packa
> > ge
> > dProgram.java:372)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.client.program.PackagedProgram.invokeInteractiveMod
> > eF
> > orExecution(PackagedProgram.java:222)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:
> > 11
> > 4)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.client.deployment.application.DetachedApplicationRu
> > nn
> > er.tryExecuteJobs(DetachedApplicationRunner.java:84)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.client.deployment.application.DetachedApplicationRu
> > nn
> > er.run(DetachedApplicationRunner.java:70)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.runtime.webmonitor.handlers.JarRunHandler.lambda$ha
> > nd
> > leRequest$0(JarRunHandler.java:102)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFu
> > tu
> > re.java:1700)
> > [?:?]
> >
> >     at
> > java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:5
> > 15
> > )
> > [?:?]
> >
> >     at java.util.concurrent.FutureTask.run(FutureTask.java:264) 
> > [?:?]
> >
> >     at
> > java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
> > .r
> > un(ScheduledThreadPoolExecutor.java:304)
> > [?:?]
> >
> >     at
> > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor
> > .j
> > ava:1128)
> > [?:?]
> >
> >     at
> > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.
> > java:628)
> > [?:?]
> >
> >     at java.lang.Thread.run(Thread.java:829) [?:?]
> >
> > Caused by: java.lang.UnsupportedOperationException: class
> > org.apache.calcite.sql.SqlSyntax$6: SPECIAL
> >
> >     at org.apache.calcite.util.Util.needToImplement(Util.java:1075)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at 
> > org.apache.calcite.sql.SqlSyntax$6.unparse(SqlSyntax.java:116)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlOperator.unparse(SqlOperator.java:329)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:101)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:
> > 19
> > 9)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:
> > 19
> > 9)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:
> > 19
> > 9)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:
> > 19
> > 9)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlSelectOperator.unparse(SqlSelectOperator.j
> > av
> > a:176)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlSelect.unparse(SqlSelect.java:246)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:154)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:176)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:185)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.operations.SqlToOperationConverter.ge
> > tQ
> > uotedSqlString(SqlToOperationConverter.java:962)
> > ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.utils.Expander$Expanded.substitute(Ex
> > pa
> > nder.java:183) ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.operations.SqlToOperationConverter.co
> > nv
> > ertViewQuery(SqlToOperationConverter.java:846)
> > ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.operations.SqlToOperationConverter.co
> > nv
> > ertCreateView(SqlToOperationConverter.java:815)
> > ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.operations.SqlToOperationConverter.co
> > nv
> > ert(SqlToOperationConverter.java:246)
> > ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.
> > java:99) ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
> >
> >     at
> > org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(
> > Ta
> > bleEnvironmentImpl.java:722)
> > ~[flink-table_2.11-1.13.0.jar:1.13.0]
> >
> >     at com.pro.TemporalJoinTest.main(TemporalJoinTest.java:182) 
> > ~[?:?]
> >
> >     at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
> > Method) ~[?:?]
> >
> >     at
> > jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAcc
> > es
> > sorImpl.java:62)
> > ~[?:?]
> >
> >     at
> > jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingM
> > et
> > hodAccessorImpl.java:43)
> > ~[?:?]
> >
> >     at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
> >
> >     at
> > org.apache.flink.client.program.PackagedProgram.callMainMethod(Packa
> > ge
> > dProgram.java:355)
> > ~[flink-dist_2.11-1.13.0.jar:1.13.0]
> >
> >     ... 12 more
> >
> > 2022-02-08 13:33:39,352 ERROR
> > org.apache.flink.runtime.webmonitor.handlers.JarRunHandler [] - 
> > Exception occurred in REST handler: Could not execute application.
> >
> >
> >
> > 看这个报错是格式的问题,但如果是格式的问题,本地应该也无法运行,也在网上看到了类似的报错,没有搜到解决方案。
> >
> > https://www.mail-archive.com/user-zh@flink.apache.org/msg10502.html
> >
> > flinksql1.12建view流式读取hive分区表不支持,报错信息:[ERROR] Could not execute SQL 
> > statement. Reason: java.lang.UnsupportedOperationException: class
> > org.apache.calcite.sql.SqlSyntax$6: SPECIAL
> >
> > 如果将OPTIONS放置在INSERT…SELECT..语句下,打包部署后也可以成功运行,但因为虚拟视图中使用了多个表,直接在
> > INSERT…SELECT语句中指定OPTIONS,会造成MySQL CDC同步SERVER ID冲突。
> >
> > 本地开发环境与部署环境添加的JAR包都是一样的,flink 1.13,MySQL CDC 2.1.1
> >
> >
> >
> > 本地引入的依赖,部署的时候会将不打包标签下的依赖排除:
> >
> > *<!--        flink-table**相关依赖(不打包)*
> > *-->         *<dependency>
> >             <groupId>org.apache.flink</groupId>
> >             
> > <artifactId>flink-streaming-java_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         *<!-- flink**运行时的*
> > *webUI -->         *<dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-runtime-web_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >
> > <artifactId>flink-streaming-scala_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId
> > >flink-table-planner-blink_${scala.binary.version}</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId
> > >flink-table-api-java-bridge_${scala.binary.version}</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-core</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-json</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >
> >
> >         *<!--flink**连接客户端*
> > *-->         *<dependency>
> >             <groupId>mysql</groupId>
> >             <artifactId>mysql-connector-java</artifactId>
> >             <version>8.0.19</version>
> >         </dependency>
> >         <dependency>
> >             <groupId>org.postgresql</groupId>
> >             <artifactId>postgresql</artifactId>
> >             <version>9.2-1004-jdbc4</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         *<!--**必须打包*
> > *-->         *<dependency>
> >             <groupId>org.apache.flink</groupId>
> >
> > <artifactId>flink-connector-kafka_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         *<!--**必须打包*
> > *-->         *<dependency>
> >             <groupId>org.apache.kafka</groupId>
> >             <artifactId>kafka-clients</artifactId>
> >             <version>0.11.0.2</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             
> > <artifactId>flink-connector-jdbc_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--<scope>provided</scope>-->         *</dependency>
> >         *<!--**必须打包*
> >
> > *--> <!--        flink sql**相关依赖(不打包)*
> > *-->         *<dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-clients_${scala.binary.version}</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-sql-client_${scala.binary.version}</
> > artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>com.ververica</groupId>
> >             <artifactId>flink-connector-mysql-cdc</artifactId>
> >
> > *<!-- the dependency is available only for stable releases. -->
> >             *<version>2.1.1</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >
> > *<!--        **存储设置*
> > *-->         *<dependency>
> >             <groupId>org.apache.flink</groupId>
> >             <artifactId>flink-statebackend-rocksdb_2.11</artifactId>
> >             <version>${flink.version}</version>
> >
> > *<!--            <scope>provided</scope>-->         *</dependency>
> >
> >
> > *<!--        **工具类依赖*
> > *-->         *<dependency>
> >             <groupId>com.github.jsqlparser</groupId>
> >             <artifactId>jsqlparser</artifactId>
> >             <version>1.2</version>
> >         </dependency>
> >
> >         <dependency>
> >             <groupId>org.slf4j</groupId>
> >             <artifactId>slf4j-api</artifactId>
> >             <version>1.7.25</version>
> >
> > *<!--<scope>provided</scope>-->         *</dependency>
> >         <dependency>
> >             <groupId>org.slf4j</groupId>
> >             <artifactId>slf4j-simple</artifactId>
> >             <version>1.7.25</version>
> >
> > *<!--<scope>provided</scope>-->         *</dependency>
> >     </dependencies>
> >
> > 部署机器上的JAR包:
> >
> > 感谢您的阅读与解答。
> >