Posted to user-zh@flink.apache.org by 奇怪的不朽琴师 <11...@qq.com> on 2020/07/15 02:27:15 UTC

Re: pyflink1.11.0window

Hello:
     I changed the source following the suggestion in your reply, but now a new error is reported. What is causing it? I have been trying to debug a window job for a while without success. Please help me, thank you.
Traceback (most recent call last):
  File "tou.py", line 71, in <module>
    from_kafka_to_kafka_demo()
  File "tou.py", line 21, in from_kafka_to_kafka_demo
    .select(" id,  time1 , time1 ")\
  File "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line 907, in select
    return Table(self._j_table.select(fields), self._t_env)
  File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
    return f(*a, **kw)
  File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o26.select.
: org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
        at org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
        at org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
        at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
        at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
        at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
        at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
        at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
        at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
        at java.lang.Thread.run(Thread.java:748)

def register_rides_source(st_env):
    source_ddl = \
    """
    create table source1(
     id int,
     time1 timestamp,
     type string,
     WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
     ) with (
    'connector.type' = 'kafka',
    'update-mode' = 'append',
    'connector.topic' = 'tp1',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'format.derive-schema' = 'true',
    'connector.version' = 'universal'
     )
    """
    st_env.sql_update(source_ddl)


def from_kafka_to_kafka_demo():
    s_env = StreamExecutionEnvironment.get_execution_environment()
    s_env.set_parallelism(1)

    st_env = StreamTableEnvironment.create(s_env)

    register_rides_source(st_env)
    register_rides_sink(st_env)

    st_env.from_path("source1")\
        .window(Tumble.over("2.secends").on("time1").alias("w")) \
        .group_by("w") \
        .select(" id,  time1 , time1 ")\
        .insert_into("sink1")

    st_env.execute("2-from_kafka_to_kafka")


The code is above.








------------------ Original Message ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Sent: Friday, July 10, 2020, 9:17 AM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window



Hi 琴师,

Does your source ddl declare time1 as a time attribute?
create table source1(
        id int,
        time1 timestamp,
        type string,
        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
) with (...)

奇怪的不朽琴师 <1129656513@qq.com> wrote on Fri, Jul 10, 2020 at 8:43 AM:

> ------------------ Original Message ------------------
> From: "奇怪的不朽琴师" <1129656513@qq.com>;
> Sent: Thursday, July 9, 2020, 5:08 PM
> To: "godfrey he" <godfreyhe@gmail.com>;
>
> Subject: pyflink1.11.0window
>
>
>
> Hello:
>     When I use pyflink 1.11, opening a window still raises an error:
> : org.apache.flink.table.api.ValidationException: A group window expects a
> time attribute for grouping in a stream environment.
>
> Has this problem not been fixed? Or am I using it the wrong way? If my usage is wrong, could you provide a correct example?
> The code is below.
> Thanks
>
>
> def from_kafka_to_kafka_demo():
>     s_env = StreamExecutionEnvironment.get_execution_environment()
>     s_env.set_parallelism(1)
>
>
>     # use blink table planner
>     st_env = StreamTableEnvironment.create(s_env)
>
>
>     # register source and sink
>     register_rides_source(st_env)
>     register_rides_sink(st_env)
>
>
>     st_env.from_path("source1")\
>         .window(Tumble.over("1.secends").on("time1").alias("w")) \
>         .group_by("w") \
>         .select(" id,  time1 , time1 ")\
>         .insert_into("sink1")
>
>     st_env.execute("2-from_kafka_to_kafka")
>
>
>
>
> def register_rides_source(st_env):
>     source_ddl = \
>     '''
>     create table source1(
>         id int,
>      time1 timestamp,
>      type string
>      ) with (
>     'connector.type' = 'kafka',
>     'update-mode' = 'append',
>     'connector.topic' = 'tp1',
>     'connector.properties.bootstrap.servers' = 'localhost:9092'
>      )
>     '''
>     st_env.sql_update(source_ddl)
>
>
>
>
> def register_rides_sink(st_env):
>     sink_ddl = \
>     '''
>     create table sink1(
>         id int,
>      time1 timestamp,
>      time2 timestamp
>      ) with (
>     'connector.type' = 'kafka',
>     'update-mode' = 'append',
>     'connector.topic' = 'tp3',
>     'connector.properties.bootstrap.servers' = 'localhost:9092'
>      )
>     '''
>     st_env.sql_update(sink_ddl)
>
>
>
>
> if __name__ == '__main__':
>     from_kafka_to_kafka_demo()
>

Re: Re: pyflink1.11.0window

Posted by "chengyanan1008@foxmail.com" <ch...@foxmail.com>.
Hi,
The main cause of this error: the kafkaTableSink is an AppendStreamTableSink, while its upstream table final_result2 comes from joining final_result (which was produced by a group by on the source1 table) with the MySQL dimension table. After that group by, the stream table has turned into one that needs a RetractStreamTableSink, which is why the error is raised here.
Please look up the material on AppendStreamTableSink and RetractStreamTableSink.
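
A rough sketch (editorial, untested, not from this thread) of one pattern that keeps the whole pipeline insert-only: enrich each raw row with the dimension table through a lookup (processing-time temporal) join first, and only run the tumble-window aggregation afterwards, so the final result is append-only and the Kafka AppendStreamTableSink can accept it. The sketch reuses the table names source1, dim_mysql and flink_result from the code quoted later in this thread, and assumes two extra things: that source1's DDL also declares a processing-time column (proctime as PROCTIME()), and that the event-time attribute time1 is still usable after the lookup join.

# Editorial sketch only; st_env is the StreamTableEnvironment from the quoted code.
# Assumes source1 additionally declares:  proctime as PROCTIME()
enrich_query = """
    select s.id, d.type, s.time1
    from source1 s
    left join dim_mysql for system_time as of s.proctime as d
      on s.type = d.id
"""
st_env.create_temporary_view('enriched', st_env.sql_query(enrich_query))

# An event-time tumble window emits each result exactly once, so this insert stays
# append-only and should not hit "AppendStreamTableSink requires that Table has
# only insert changes."
st_env.sql_update("""
    insert into flink_result
    select cast(sum(id) as int) as id,
           max(type) as type,
           cast(tumble_start(time1, interval '4' second) as bigint) as rtime
    from enriched
    group by tumble(time1, interval '4' second)
""")

The exact aggregation would still need to be adjusted to the real requirement; the point is only the join-then-aggregate ordering and the lookup join type.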


As for the "&nbsp;" sequences appearing in the mails, you could switch to a different mail editor.

Hope this helps



chengyanan1008@foxmail.com
 
From: 奇怪的不朽琴师
Sent: 2020-07-21 10:23
To: user-zh
Subject: Re: pyflink1.11.0window
Hello:
     It automatically escapes spaces, line breaks and similar characters, and for now I have no good way around it, which is frustrating. Do you have a pyflink demo of a multi-table join? Many thanks!
 
 
 
 
------------------ Original Message ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Sent: Monday, July 20, 2020, 8:42 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window
 
 
 
Look at the exception message; is your insert mode perhaps not configured correctly?
BTW, the text you pasted contains a lot of "&nbsp;", which hurts readability a bit.

Best,
Shuiqiang
 
奇怪的不朽琴师 <1129656513@qq.com> wrote on Mon, Jul 20, 2020 at 4:23 PM:
 
> HI:
>     I now have a new problem. I added a join on top of this, and writing to kafka now raises an error, as follows
> Traceback (most recent call last):
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
>     return f(*a, **kw)
>   File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
>     format(target_id, ".", name), value)
> py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
> : org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>         at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>         at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>         at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
>         at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
>         at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
>         at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
>         at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
>         at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
>         at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
>         at java.lang.Thread.run(Thread.java:748)
>
>
> During handling of the above exception, another exception occurred:
>
>
> Traceback (most recent call last):
>   File "tou.py", line 99, in <module>
>     from_kafka_to_kafka_demo()
>   File "tou.py", line 33, in from_kafka_to_kafka_demo
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>   File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
>     self._j_tenv.sqlUpdate(stmt)
>   File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
>     answer, self.gateway_client, self.target_id, self.name)
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
>     raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
> pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'
>
>
> How should this be implemented? The requirement is roughly: a stream table (which needs a group-by aggregation) joined with a dimension table.
>
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, DataTypes, CsvTableSource, CsvTableSink
> from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
> from pyflink.table.window import Tumble
>
>
> def from_kafka_to_kafka_demo():
>
>     # use blink table planner
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     # register source and sink
>     register_rides_source(st_env)
>     register_rides_sink(st_env)
>     register_mysql_source(st_env)
>
>     query = """
>     select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
>     from source1 t1
>     group by tumble(t1.time1, interval '4' second)
>     """
>     count_result = st_env.sql_query(query)
>     st_env.create_temporary_view('final_result', count_result)
>     query2 = """
>     select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type=t2.id
>     """
>     count_result2 = st_env.sql_query(query2)
>     st_env.create_temporary_view('final_result2', count_result2)
>
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>     st_env.execute("2-from_kafka_to_kafka")
>
>
> def register_rides_source(st_env):
>     source_ddl = \
>     """
>     create table source1(
>      id int,
>      time2 varchar,
>      time1 as TO_TIMESTAMP(time2,'yyyyMMddHHmmss'),
>      type string,
>      WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
>      ) with (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp1',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>      )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_mysql_source(st_env):
>     source_ddl = \
>     """
>     CREATE TABLE dim_mysql (
>     id varchar,  --
>     type varchar --
>     ) WITH (
>     'connector.type' = 'jdbc',
>     'connector.url' = 'jdbc:mysql://localhost:3390/test',
>     'connector.table' = 'flink_test',
>     'connector.driver' = 'com.mysql.jdbc.Driver',
>     'connector.username' = '****',
>     'connector.password' = '*****',
>     'connector.lookup.cache.max-rows' = '5000',
>     'connector.lookup.cache.ttl' = '10min'
>     )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_rides_sink(st_env):
>     sink_ddl = \
>     """
>     CREATE TABLE flink_result (
>     id int,
>     type varchar,
>     rtime bigint,
>     primary key(id)
>     ) WITH (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp4',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>      )
>     """
>     st_env.sql_update(sink_ddl)
>
>
> if __name__ == '__main__':
>     from_kafka_to_kafka_demo()
>
>
> ------------------ Original Message ------------------
> From: "我自己的邮箱" <1129656513@qq.com>;
> Sent: Wednesday, July 15, 2020, 5:30 PM
> To: "user-zh" <user-zh@flink.apache.org>;
>
> Subject: Re: pyflink1.11.0window
>
>
>
>      Thank you very much!
>
>
> ------------------ Original Message ------------------
> From: "user-zh" <acqua.csq@gmail.com>;
> Sent: Wednesday, July 15, 2020, 5:23 PM
> To: "user-zh" <user-zh@flink.apache.org>;
>
> Subject: Re: pyflink1.11.0window
>
>
>
> The example below reads JSON-format data from kafka, does a window aggregation, and writes the result to es. You can refer to its code structure and adjust the data fields accordingly. This code probably cannot be run as-is on your machine.
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
> from pyflink.table.udf import udf
>
>
> @udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
> def platform_code_to_name(code):
>     return "mobile" if code == 0 else "pc"
>
>
> def log_processing():
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     t_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     source_ddl = """
>         CREATE TABLE payment_msg(
>             createTime VARCHAR,
>             rt as TO_TIMESTAMP(createTime),
>             orderId BIGINT,
>             payAmount DOUBLE,
>             payPlatform INT,
>             paySource INT,
>             WATERMARK FOR rt as rt - INTERVAL '2' SECOND
>         ) WITH (
>             'connector.type' = 'kafka',
>             'connector.version' = 'universal',
>             'connector.topic' = 'payment_msg_2',
>             'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
>             'connector.properties.group.id' = 'test_3',
>             'connector.startup-mode' = 'latest-offset',
>             'format.type' = 'json'
>         )
>         """
>     t_env.sql_update(source_ddl)
>
>     es_sink_ddl = """
>         CREATE TABLE es_sink (
>             platform VARCHAR,
>             pay_amount DOUBLE,
>             rowtime TIMESTAMP(3)
>         ) with (
>             'connector.type' = 'elasticsearch',
>             'connector.version' = '7',
>             'connector.hosts' = 'http://localhost:9200',
>             'connector.index' = 'platform_pay_amount_1',
>             'connector.document-type' = 'payment',
>             'update-mode' = 'upsert',
>             'connector.flush-on-checkpoint' = 'true',
>             'connector.key-delimiter' = '$',
>             'connector.key-null-literal' = 'n/a',
>             'connector.bulk-flush.max-size' = '42mb',
>             'connector.bulk-flush.max-actions' = '32',
>             'connector.bulk-flush.interval' = '1000',
>             'connector.bulk-flush.backoff.delay' = '1000',
>             'format.type' = 'json'
>         )
>     """
>
>     t_env.sql_update(es_sink_ddl)
>
>     t_env.register_function('platformcodetoname', platform_code_to_name)
>
>     query = """
>     select platformcodetoname(payPlatform) as platform, sum(payAmount)
>     as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT)
>     as rowtime
>     from payment_msg
>     group by tumble(rt, interval '5' seconds), payPlatform
>     """
>
>     count_result = t_env.sql_query(query)
>
>     t_env.create_temporary_view('windowed_values', count_result)
>
>     query2 = """
>     select platform, last_value(pay_amount), rowtime from windowed_values group by platform, rowtime
>     """
>
>     final_result = t_env.sql_query(query2)
>
>     final_result.execute_insert(table_path='es_sink')
>
>
> if __name__ == '__main__':
>     log_processing()
>
>
> 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wed, Jul 15, 2020 at 4:40 PM:
>
> > Hi Shuiqiang:
> > Could I trouble you to share a complete code example? I am a beginner; the official 2-from_kafka_to_kafka.py has no window, and I want a demo that adds a window on top of it. I have tried to write one for a long time without success, and after adding the window I keep running into all kinds of problems, which is really painful. I would be very grateful for your help.
> >
> >
> > A plea to every expert who sees this mail!
> >
> >
> > Thanks!
> >
> >
> > ------------------ Original Message ------------------
> > From: "user-zh" <acqua.csq@gmail.com>;
> > Sent: Wednesday, July 15, 2020, 11:25 AM
> > To: "user-zh" <user-zh@flink.apache.org>;
> >
> > Subject: Re: pyflink1.11.0window
> >
> >
> >
> > Here is a SQL example:
> > select platformcodetoname(payPlatform) as platform, sum(payAmount) as
> > pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as
> > rowtime
> > from payment_msg group by tumble(rt, interval '5' seconds), payPlatform
> > This query aggregates over each 5-second tumble window.
> >
> > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wed, Jul 15, 2020 at 11:10 AM:
> >
> > > Hi Shuiqiang:
> > > My goal is to produce an aggregate every fixed interval, for example a summary every two seconds. How should I define the window for this requirement?
> > >
> > >
> > > ------------------ Original Message ------------------
> > > From: "user-zh" <acqua.csq@gmail.com>;
> > > Sent: Wednesday, July 15, 2020, 10:51 AM
> > > To: "user-zh" <user-zh@flink.apache.org>;
> > >
> > > Subject: Re: pyflink1.11.0window
> > >
> > >
> > >
> > > Hi 琴师,
> > > The exception in the stack trace, org.apache.flink.table.api.ValidationException: A tumble window
> > > expects a size value literal,
> > > suggests that the code defining the tumble window is not quite right.
> > >
> > > Best,
> > > Shuiqiang
> > >
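
For reference, a guess at the concrete fix (an editorial sketch, untested, not part of the thread): "2.secends" in the window definition above (and "1.secends" in the first mail) is not a valid time-interval literal, which is what "A tumble window expects a size value literal" complains about. Using the same string-expression style as the rest of the thread, the window definition would look roughly like this; note that after group_by("w") the select list also has to use window properties and aggregates rather than raw columns:

from pyflink.table.window import Tumble

# st_env and the source1 table are the ones registered earlier in this thread;
# "2.seconds" parses as an interval literal, the misspelled "2.secends" does not.
result = st_env.from_path("source1") \
    .window(Tumble.over("2.seconds").on("time1").alias("w")) \
    .group_by("w") \
    .select("w.start, w.end, id.count")
# The sink schema (sink1) would have to be adjusted to match this select list
# before calling result.insert_into(...).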
&gt; &amp;gt; &amp;amp;gt; 奇怪的不朽琴师 <1129656513@qq.com&amp;amp;amp;gt; 于2020年7月15日周三
&gt; 上午10:27写道:
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 你好:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;nbsp;我按着你回复的建议改了source但是会报新的错误,请问这个是因为什么?我想调试一个window一直没有成功,请帮帮我,谢谢。
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; Traceback (most recent call last):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File "tou.py", line 71, in
&gt; <module&amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; from_kafka_to_kafka_demo()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File "tou.py", line 21, in
&gt; &amp;gt; from_kafka_to_kafka_demo
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; .select("
&gt; id,&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line
&gt; &amp;gt; &amp;amp;gt; 907,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; in select
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; return
&gt; &amp;gt; Table(self._j_table.select(fields),
&gt; &amp;gt; &amp;amp;gt; self._t_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt;
&gt; "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py",
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; line 1286, in __call__
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; answer,
&gt; self.gateway_client,
&gt; &amp;gt; self.target_id,
&gt; &amp;gt; &amp;amp;gt; self.name)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py",
&gt; &amp;gt; &amp;amp;gt; line
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 147, in deco
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; return
&gt; f(*a, **kw)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt; "/usr/local/lib/python3.7/site-packages/py4j/protocol.py",
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; line 328, in get_return_value
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; format(target_id, ".", name),
&gt; &amp;gt; value)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; py4j.protocol.Py4JJavaError: An error occurred
&gt; while calling
&gt; &amp;gt; &amp;amp;gt; o26.select.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; :
&gt; org.apache.flink.table.api.ValidationException: A tumble
&gt; &amp;gt; window
&gt; &amp;gt; &amp;amp;gt; expects
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; a size value literal.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; java.lang.reflect.Method.invoke(Method.java:498)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; at
&gt; &amp;gt; &amp;amp;gt; java.lang.Thread.run(Thread.java:748)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def register_rides_source(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; source_ddl = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; """
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; create
&gt; table source1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;type string,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;WATERMARK FOR
&gt; &amp;gt; time1 as time1 -
&gt; &amp;gt; &amp;amp;gt; INTERVAL '2' SECOND
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.topic' = 'tp1',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; &amp;amp;gt; 'localhost:9092',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.properties.zookeeper.connect' =
&gt; &amp;gt; &amp;amp;gt; 'localhost:2181',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'format.type' = 'json',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'format.derive-schema' =
&gt; &amp;gt; 'true',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.version' =
&gt; &amp;gt; 'universal'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; """
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; st_env.sql_update(source_ddl)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; s_env =
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; StreamExecutionEnvironment.get_execution_environment()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; s_env.set_parallelism(1)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; st_env =
&gt; &amp;gt; StreamTableEnvironment.create(s_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; register_rides_source(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; register_rides_sink(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; st_env.from_path("source1")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; .window(Tumble.over("2.secends").on("time1").alias("w")) \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .group_by("w") \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .select(" id,&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .insert_into("sink1")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; st_env.execute("2-from_kafka_to_kafka")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> > Code as above.
> >
> > ------------------ Original message ------------------
> > From: "user-zh" <acqua.csq@gmail.com>
> > Sent: Friday, July 10, 2020, 9:17 AM
> > To: "user-zh" <user-zh@flink.apache.org>
> > Subject: Re: pyflink1.11.0window
> >
> > Hi 琴师,
> >
> > Does your source DDL declare time1 as a time attribute? For example:
> >
> > create table source1(
> >     id int,
> >     time1 timestamp,
> >     type string,
> >     WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
> > ) with (...)
> >
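As an aside for readers following the thread: below is a minimal, self-contained sketch of registering such a source so that time1 is usable as an event-time attribute. It assumes PyFlink 1.11; the topic, broker address and format settings are placeholders, and the rowtime column generally needs to be of type TIMESTAMP(3).

# Sketch only: register a Kafka source whose time1 column is an event-time attribute.
# Assumes PyFlink 1.11; connector settings below are placeholders, not the thread's.
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.table import StreamTableEnvironment

s_env = StreamExecutionEnvironment.get_execution_environment()
st_env = StreamTableEnvironment.create(s_env)

source_ddl = """
create table source1 (
    id int,
    time1 timestamp(3),
    type string,
    -- the watermark declaration is what turns time1 into a time attribute
    WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
) with (
    'connector.type' = 'kafka',
    'connector.version' = 'universal',
    'connector.topic' = 'tp1',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'update-mode' = 'append',
    'format.type' = 'json'
)
"""
st_env.sql_update(source_ddl)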
> > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Friday, July 10, 2020 at 8:43 AM:
> >
> > > ------------------ Original message ------------------
> > > From: "奇怪的不朽琴师" <1129656513@qq.com>
> > > Sent: Thursday, July 9, 2020, 5:08 PM
> > > To: "godfrey he" <godfreyhe@gmail.com>
> > > Subject: pyflink1.11.0window
> > >
> > > Hello:
> > >     With PyFlink 1.11, opening a window still fails with:
> > > org.apache.flink.table.api.ValidationException: A group window expects a time attribute for grouping in a stream environment.
> > > Has this problem not been fixed, or am I using the API incorrectly? If it is my usage, could you provide a correct example?
> > > The code is below. Thanks.
> > >
> > > def from_kafka_to_kafka_demo():
> > >     s_env = StreamExecutionEnvironment.get_execution_environment()
> > >     s_env.set_parallelism(1)
> > >
> > >     # use blink table planner
> > >     st_env = StreamTableEnvironment.create(s_env)
> > >
> > >     # register source and sink
> > >     register_rides_source(st_env)
> > >     register_rides_sink(st_env)
> > >
> > >     st_env.from_path("source1")\
> > >         .window(Tumble.over("1.secends").on("time1").alias("w")) \
> > >         .group_by("w") \
> > >         .select(" id,  time1 , time1 ")\
> > >         .insert_into("sink1")
> > >
> > >     st_env.execute("2-from_kafka_to_kafka")
> > >
> > >
> > > def register_rides_source(st_env):
> > >     source_ddl = \
> > >     '''
> > >     create table source1(
> > >      id int,
> > >      time1 timestamp,
> > >      type string
> > >     ) with (
> > >     'connector.type' = 'kafka',
> > >     'update-mode' = 'append',
> > >     'connector.topic' = 'tp1',
> > >     'connector.properties.bootstrap.servers' = 'localhost:9092'
> > >      )
> > >     '''
> > >     st_env.sql_update(source_ddl)
> > >
> > >
> > > def register_rides_sink(st_env):
> > >     sink_ddl = \
> > >     '''
> > >     create table sink1(
> > >      id int,
> > >      time1 timestamp,
> > >      time2 timestamp
> > >     ) with (
> > >     'connector.type' = 'kafka',
> > >     'update-mode' = 'append',
> > >     'connector.topic' = 'tp3',
> > >     'connector.properties.bootstrap.servers' = 'localhost:9092'
> > >      )
> > >     '''
> > >     st_env.sql_update(sink_ddl)
> > >
> > >
> > > if __name__ == '__main__':
> > >     from_kafka_to_kafka_demo()
> >

回复: pyflink1.11.0window

Posted by 奇怪的不朽琴师 <11...@qq.com>.
Hello:
     The mail client automatically escapes spaces, line breaks and similar characters; I have no good workaround for now, which is frustrating. Do you happen to have a PyFlink demo of a multi-table join? Many thanks!


------------------ Original message ------------------
From: "user-zh" <acqua.csq@gmail.com>
Sent: Monday, July 20, 2020, 8:42 PM
To: "user-zh" <user-zh@flink.apache.org>

Subject: Re: pyflink1.11.0window



Look at the exception: it is probably your insert mode that is not configured correctly.
BTW, the text you pasted contains a lot of "&nbsp;", which hurts readability a bit.

Best,
Shuiqiang

奇怪的不朽琴师 <1129656513@qq.com> wrote on Monday, July 20, 2020 at 4:23 PM:

> Hi:
>     I now have a new problem: on top of the previous job I added a join, and writing the result back to Kafka fails as follows.
> Traceback (most recent call last):
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
>     return f(*a, **kw)
>   File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
>     format(target_id, ".", name), value)
> py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
> : org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>         at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>         at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>         at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
>         at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
>         at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
>         at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
>         at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
>         at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
>         at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
>         at java.lang.Thread.run(Thread.java:748)
>
>
> During handling of the above exception, another exception occurred:
>
> Traceback (most recent call last):
>   File "tou.py", line 99, in <module>
>     from_kafka_to_kafka_demo()
>   File "tou.py", line 33, in from_kafka_to_kafka_demo
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>   File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
>     self._j_tenv.sqlUpdate(stmt)
>   File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
>     answer, self.gateway_client, self.target_id, self.name)
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
>     raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
> pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'
>
>
> How should this be implemented? The requirement is roughly: a stream table (which needs a grouped aggregation) joined with a dimension table.
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, CsvTableSource, CsvTableSink
> from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
> from pyflink.table.window import Tumble
>
>
> def from_kafka_to_kafka_demo():
>
>     # use blink table planner
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     # register source and sink
>     register_rides_source(st_env)
>     register_rides_sink(st_env)
>     register_mysql_source(st_env)
>
>     query = """
>     select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
>     from source1 t1
>     group by tumble(t1.time1, interval '4' second)
>     """
>     count_result = st_env.sql_query(query)
>     st_env.create_temporary_view('final_result', count_result)
>
>     query2 = """
>     select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type = t2.id
>     """
>     count_result2 = st_env.sql_query(query2)
>     st_env.create_temporary_view('final_result2', count_result2)
>
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>     st_env.execute("2-from_kafka_to_kafka")
>
>
> def register_rides_source(st_env):
>     source_ddl = \
>     """
>     create table source1(
>      id int,
>      time2 varchar ,
>      time1 as TO_TIMESTAMP(time2,'yyyyMMddHHmmss'),
>      type string,
>      WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
>      ) with (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp1',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>      )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_mysql_source(st_env):
>     source_ddl = \
>     """
>     CREATE TABLE dim_mysql (
>     id varchar,  --
>     type varchar --
>     ) WITH (
>     'connector.type' = 'jdbc',
>     'connector.url' = 'jdbc:mysql://localhost:3390/test',
>     'connector.table' = 'flink_test',
>     'connector.driver' = 'com.mysql.jdbc.Driver',
>     'connector.username' = '****',
>     'connector.password' = '*****',
>     'connector.lookup.cache.max-rows' = '5000',
>     'connector.lookup.cache.ttl' = '10min'
>     )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_rides_sink(st_env):
>     sink_ddl = \
>     """
>     CREATE TABLE flink_result (
>     id int,
>     type varchar,
>     rtime bigint,
>     primary key(id)
>     ) WITH (
>     with (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp4',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>      )
>     )
>     """
>     st_env.sql_update(sink_ddl)
>
>
> if __name__ == '__main__':
>     from_kafka_to_kafka_demo()
>
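On the AppendStreamTableSink error quoted above: the tumbling-window aggregation itself is insert-only, but the regular left join against the JDBC table afterwards can produce updates, which the append-only Kafka sink rejects. One possible direction, sketched here as an assumption rather than as the thread's confirmed fix, is to do the dimension lookup as a lookup (temporal) join before aggregating, which keeps the result insert-only; it assumes source1's DDL additionally declares a processing-time column such as proctime as PROCTIME().

-- Sketch only: assumes source1 also declares `proctime as PROCTIME()` and that
-- dim_mysql is usable as a JDBC lookup table (the lookup.cache options above
-- already point that way). Verify the resulting plan on your own setup.
insert into flink_result
select
    cast(sum(t1.id) as int)                                     as id,
    max(t2.type)                                                as type,
    cast(tumble_start(t1.time1, interval '4' second) as bigint) as rtime
from source1 t1
left join dim_mysql for system_time as of t1.proctime as t2
    on t1.type = t2.id
group by tumble(t1.time1, interval '4' second)

Alternatively, keep the query as posted and write the updating result to an upsert-capable sink (for example the Elasticsearch sink with 'update-mode' = 'upsert' shown later in this message) instead of the append-only Kafka sink.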
> ------------------ Original message ------------------
> From: "我自己的邮箱" <1129656513@qq.com>
> Sent: Wednesday, July 15, 2020, 5:30 PM
> To: "user-zh" <user-zh@flink.apache.org>
>
> Subject: 回复: pyflink1.11.0window
>
>      Thank you very much!
>
>
> ------------------ Original message ------------------
> From: "user-zh" <acqua.csq@gmail.com>
> Sent: Wednesday, July 15, 2020, 5:23 PM
> To: "user-zh" <user-zh@flink.apache.org>
>
> Subject: Re: pyflink1.11.0window
>
>
> The example below reads JSON data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. You can use the code structure as a reference and adapt the data fields. This code probably will not run on your machine as-is.
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
> from pyflink.table.udf import udf
>
>
> @udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
> def platform_code_to_name(code):
>     return "mobile" if code == 0 else "pc"
>
>
> def log_processing():
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     t_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     source_ddl = """
>         CREATE TABLE payment_msg(
>             createTime VARCHAR,
>             rt as TO_TIMESTAMP(createTime),
>             orderId BIGINT,
>             payAmount DOUBLE,
>             payPlatform INT,
>             paySource INT,
>             WATERMARK FOR rt as rt - INTERVAL '2' SECOND
>         ) WITH (
>             'connector.type' = 'kafka',
>             'connector.version' = 'universal',
>             'connector.topic' = 'payment_msg_2',
>             'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
>             'connector.properties.group.id' = 'test_3',
>             'connector.startup-mode' = 'latest-offset',
>             'format.type' = 'json'
>         )
>         """
>     t_env.sql_update(source_ddl)
>
>     es_sink_ddl = """
>         CREATE TABLE es_sink (
>             platform VARCHAR,
>             pay_amount DOUBLE,
>             rowtime TIMESTAMP(3)
>         ) with (
>             'connector.type' = 'elasticsearch',
>             'connector.version' = '7',
>             'connector.hosts' = 'http://localhost:9200',
>             'connector.index' = 'platform_pay_amount_1',
>             'connector.document-type' = 'payment',
>             'update-mode' = 'upsert',
>             'connector.flush-on-checkpoint' = 'true',
>             'connector.key-delimiter' = '$',
>             'connector.key-null-literal' = 'n/a',
>             'connector.bulk-flush.max-size' = '42mb',
>             'connector.bulk-flush.max-actions' = '32',
>             'connector.bulk-flush.interval' = '1000',
>             'connector.bulk-flush.backoff.delay' = '1000',
>             'format.type' = 'json'
>         )
>         """
>
>     t_env.sql_update(es_sink_ddl)
>
>     t_env.register_function('platformcodetoname', platform_code_to_name)
>
>     query = """
>     select platformcodetoname(payPlatform) as platform, sum(payAmount) as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
>     from payment_msg
>     group by tumble(rt, interval '5' seconds), payPlatform
>     """
>
>     count_result = t_env.sql_query(query)
>
>     t_env.create_temporary_view('windowed_values', count_result)
>
>     query2 = """
>     select platform, last_value(pay_amount), rowtime from windowed_values group by platform, rowtime
>     """
>
>     final_result = t_env.sql_query(query2)
>
>     final_result.execute_insert(table_path='es_sink')
>
>
> if __name__ == '__main__':
>     log_processing()
>
>
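A short note on the structure above: query2 re-groups the windowed result with last_value, which makes the stream updating; that is why es_sink is declared with 'update-mode' = 'upsert'. If that deduplication step is not needed, an append-only variant (a sketch reusing the names from the quoted code) writes the windowed aggregate directly:

# Sketch only, reusing t_env, query and es_sink from the quoted example.
# A tumbling-window aggregate is insert-only; column types must still
# line up with the sink schema.
count_result = t_env.sql_query(query)
count_result.execute_insert(table_path='es_sink')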
> 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020 at 4:40 PM:
>
> > Hi Shuiqiang:
> >     Could I ask you to share a complete code example? I am a beginner; the official 2-from_kafka_to_kafka.py example has no window, and I would like a demo that adds a window on top of it. I have tried for a long time without success, and every time I add windowing to that demo I run into all kinds of problems, which is quite painful. I would be very grateful for any help.
> >
> > A plea to every expert who sees this email!
> >
> > Thanks!
> >
> > ------------------ Original message ------------------
> > From: "user-zh" <acqua.csq@gmail.com>
> > Sent: Wednesday, July 15, 2020, 11:25 AM
> > To: "user-zh" <user-zh@flink.apache.org>
> >
> > Subject: Re: pyflink1.11.0window
> >
> > Here is a SQL example:
> > select platformcodetoname(payPlatform) as platform, sum(payAmount) as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
> > from payment_msg group by tumble(rt, interval '5' seconds), payPlatform
> > This query aggregates over each 5-second tumbling window.
> >
> > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020 at 11:10 AM:
> >
> > > Hi Shuiqiang:
> > >     My goal is to produce an aggregated summary at a fixed interval, for example every two seconds. How should I define the window for this requirement?
> > >
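For the "every two seconds" requirement above, the same tumbling-window pattern applies with a smaller interval; a minimal SQL sketch reusing the payment_msg fields from the example quoted earlier in this message:

-- Sketch only: a 2-second tumbling window over payment_msg; adapt field
-- names to your own schema.
select payPlatform,
       sum(payAmount) as pay_amount,
       tumble_start(rt, interval '2' second) as window_start
from payment_msg
group by tumble(rt, interval '2' second), payPlatform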
> > > ------------------ Original message ------------------
> > > From: "user-zh" <acqua.csq@gmail.com>
> > > Sent: Wednesday, July 15, 2020, 10:51 AM
> > > To: "user-zh" <user-zh@flink.apache.org>
> > >
> > > Subject: Re: pyflink1.11.0window
> > >
> > > Hi 琴师,
> > > The exception stack says org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
> > > That suggests the code defining the tumble window is not quite right.
> > >
> > > Best,
> > > Shuiqiang
> > >
> > > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020 at 10:27 AM:
> > > >
> > > > Hello:
> > > >     I changed the source as you suggested, but now I get a new error. Why is that? I have been trying to get a window working without success; please help me, thanks.
> > > > Traceback (most recent call last):
> > > >   File "tou.py", line 71, in <module>
> > > >     from_kafka_to_kafka_demo()
> > > >   File "tou.py", line 21, in from_kafka_to_kafka_demo
> > > >     .select(" id,  time1 , time1 ")\
> > > >   File "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line 907, in select
> > > >     return Table(self._j_table.select(fields), self._t_env)
> > > >   File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
> > > >     answer, self.gateway_client, self.target_id, self.name)
> > > >   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
> > > >     return f(*a, **kw)
> > > >   File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
> > > >     format(target_id, ".", name), value)
> > > > py4j.protocol.Py4JJavaError: An error occurred while calling o26.select.
> > > > : org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
> > > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
> > > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
> > > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
> > > >         at org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
> > > >         at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
> > > >         at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
> > > >         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > > >         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> > > >         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> > > >         at java.lang.reflect.Method.invoke(Method.java:498)
> > > >         at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
> > > >         at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
> > > >         at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
> > > >         at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
> > > >         at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
> > > >         at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
> > > >         at java.lang.Thread.run(Thread.java:748)
> > > >
> > > >
> > > > def register_rides_source(st_env):
> > > >     source_ddl = \
> > > >     """
> > > >     create table source1(
> > > >      id int,
> > > >      time1 timestamp,
> > > >      type string,
> > > >      WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
> > > >     ) with (
> > > >     'connector.type' = 'kafka',
> > > >     'update-mode' = 'append',
> > > >     'connector.topic' = 'tp1',
> > > >     'connector.properties.bootstrap.servers' = 'localhost:9092',
> > > >     'connector.properties.zookeeper.connect' = 'localhost:2181',
> > > >     'format.type' = 'json',
> > > >     'format.derive-schema' = 'true',
> > > >     'connector.version' = 'universal'
> > > >      )
> > > >     """
> > > >     st_env.sql_update(source_ddl)
> > > >
> > > >
> > > >     s_env = StreamExecutionEnvironment.get_execution_environment()
> > > >     s_env.set_parallelism(1)
> > > >
> > > >     st_env = StreamTableEnvironment.create(s_env)
> > > >
> > > >     register_rides_source(st_env)
> > > >     register_rides_sink(st_env)
> > > >
> > > >     st_env.from_path("source1")\
> > > >         .window(Tumble.over("2.secends").on("time1").alias("w")) \
> > > >         .group_by("w") \
> > > >         .select(" id,  time1 , time1 ")\
> > > >         .insert_into("sink1")
> > > >
> > > >     st_env.execute("2-from_kafka_to_kafka")
> > > >
> > > > Code as above.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt;
&gt; ------------------&amp;amp;amp;amp;nbsp;原始邮件&amp;amp;amp;amp;nbsp;------------------
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 发件人:
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;gt;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; "user-zh"
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;gt;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; <
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; acqua.csq@gmail.com&amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 发送时间:&amp;amp;amp;amp;nbsp;2020年7月10日(星期五) 上午9:17
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 收件人:&amp;amp;amp;amp;nbsp;"user-zh"<
&gt; user-zh@flink.apache.org
&gt; &amp;gt; &amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 主题:&amp;amp;amp;amp;nbsp;Re: pyflink1.11.0window
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 琴师你好,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 你的source ddl里有指定time1为 time attribute吗?
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; create table source1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; id
&gt; &amp;gt; &amp;amp;gt; int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; type
&gt; &amp;gt; &amp;amp;gt; string,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; WATERMARK FOR time1 as time1 -
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; INTERVAL '2' SECOND
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; ) with (...)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 奇怪的不朽琴师 <1129656513@qq.com&amp;amp;amp;amp;gt;
&gt; 于2020年7月10日周五
&gt; &amp;gt; 上午8:43写道:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; ------------------&amp;amp;amp;amp;amp;nbsp;原始邮件&amp;amp;amp;amp;amp;nbsp;------------------
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; 发件人:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;gt;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; "奇怪的不朽琴师"
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;amp;gt;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; <
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; 1129656513@qq.com
&gt; &amp;amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; 发送时间:&amp;amp;amp;amp;amp;nbsp;2020年7月9日(星期四) 下午5:08
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; 收件人:&amp;amp;amp;amp;amp;nbsp;"godfrey he"<
&gt; &amp;gt; godfreyhe@gmail.com
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; 主题:&amp;amp;amp;amp;amp;nbsp;pyflink1.11.0window
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; 你好:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;我在使用pyflink1.11版本时,window开窗仍会报错
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; :
&gt; &amp;gt; org.apache.flink.table.api.ValidationException: A group
&gt; &amp;gt; &amp;amp;gt; window
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; expects a
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; time attribute for grouping in
&gt; a stream
&gt; &amp;gt; environment.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; 请问这个问题没有修复么?或者是我使用的方式不对,如果是使用不对,能提供一个正确的案例么?
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; 代码如下
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; 谢谢
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; def from_kafka_to_kafka_demo():
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; s_env =
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; StreamExecutionEnvironment.get_execution_environment()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; s_env.set_parallelism(1)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; # use
&gt; &amp;gt; blink table planner
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; st_env =
&gt; &amp;gt; &amp;amp;gt; StreamTableEnvironment.create(s_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; #
&gt; &amp;gt; register source and sink
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; register_rides_source(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; register_rides_sink(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; st_env.from_path("source1")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; .window(Tumble.over("1.secends").on("time1").alias("w")) \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; .group_by("w") \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; .select(" id,&amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; .insert_into("sink1")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; st_env.execute("2-from_kafka_to_kafka")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; def
&gt; register_rides_source(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; source_ddl = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; create
&gt; &amp;gt; table source1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;type string
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.topic' = 'tp1',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 'localhost:9092'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; st_env.sql_update(source_ddl)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; def
&gt; register_rides_sink(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; sink_ddl
&gt; &amp;gt; = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; create
&gt; &amp;gt; table sink1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;time2 timestamp
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.topic' = 'tp3',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 'localhost:9092'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; st_env.sql_update(sink_ddl)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; if __name__ == '__main__':
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;gt; from_kafka_to_kafka_demo()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;gt; &amp;amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;amp;nbsp;

Re: pyflink1.11.0window

Posted by Shuiqiang Chen <ac...@gmail.com>.
Take a look at the exception message: it suggests your insert mode is not configured correctly.
BTW, the text you pasted contains a lot of "&nbsp;", which makes it hard to read.

Best,
Shuiqiang
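
To make that advice concrete, here is a minimal, untested sketch of why the planner complains, reusing the table names from the code quoted below (the query shapes are illustrative assumptions, not the exact job):

# Sketch only. The first query emits one final row per tumble window, so its
# result is insert-only and fits a Kafka sink declared with
# 'update-mode' = 'append'. The second query, a left outer join against a
# dimension table, can retract a previously emitted null-padded row when a
# matching dimension row shows up, which is why the planner raises
# "AppendStreamTableSink requires that Table has only insert changes" unless
# the sink accepts upserts or retractions (for example the Elasticsearch sink
# with 'update-mode' = 'upsert' further down this thread).
append_only_query = """
    select cast(sum(t1.id) as int) as id,
           cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
    from source1 t1
    group by tumble(t1.time1, interval '4' second)
"""

updating_query = """
    select t1.id, t2.type, t1.rowtime
    from final_result t1
    left join dim_mysql t2 on t1.type = t2.id
"""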

奇怪的不朽琴师 <11...@qq.com> wrote on Monday, July 20, 2020 at 4:23 PM:

> Hi:
>     I now have a new problem. On top of this I added a join, and writing the result back to Kafka fails with the error below.
> Traceback (most recent call last):
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
>     return f(*a, **kw)
>   File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
>     format(target_id, ".", name), value)
> py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
> : org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
>         at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
>         at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>         at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>         at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>         at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
>         at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
>         at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
>         at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
>         at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
>         at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
>         at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
>         at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
>         at java.lang.Thread.run(Thread.java:748)
>
>
>
>
> During handling of the above exception, another exception occurred:
>
>
> Traceback (most recent call last):
>   File "tou.py", line 99, in <module>
>     from_kafka_to_kafka_demo()
>   File "tou.py", line 33, in from_kafka_to_kafka_demo
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>   File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
>     self._j_tenv.sqlUpdate(stmt)
>   File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
>     answer, self.gateway_client, self.target_id, self.name)
>   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
>     raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
> pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'
>
>
>
>
>
> How should this be implemented? The requirement is roughly: a stream table (which needs to be grouped and aggregated) joined with a dimension table.
>
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, CsvTableSource, CsvTableSink
> from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
> from pyflink.table.window import Tumble
>
>
>
>
> def from_kafka_to_kafka_demo():
>
>     # use blink table planner
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     # register source and sink
>     register_rides_source(st_env)
>     register_rides_sink(st_env)
>     register_mysql_source(st_env)
>
>     query = """
>     select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
>     from source1 t1
>     group by tumble(t1.time1, interval '4' second)
>     """
>     count_result = st_env.sql_query(query)
>     st_env.create_temporary_view('final_result', count_result)
>
>     query2 = """
>     select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type = t2.id
>     """
>     count_result2 = st_env.sql_query(query2)
>     st_env.create_temporary_view('final_result2', count_result2)
>
>     st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
>     st_env.execute("2-from_kafka_to_kafka")
>
>
>
>
> def register_rides_source(st_env):
>     source_ddl = \
>     """
>     create table source1(
>      id int,
>      time2 varchar,
>      time1 as TO_TIMESTAMP(time2, 'yyyyMMddHHmmss'),
>      type string,
>      WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
>      ) with (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp1',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>      )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_mysql_source(st_env):
>     source_ddl = \
>     """
>     CREATE TABLE dim_mysql (
>     id varchar,
>     type varchar
>     ) WITH (
>     'connector.type' = 'jdbc',
>     'connector.url' = 'jdbc:mysql://localhost:3390/test',
>     'connector.table' = 'flink_test',
>     'connector.driver' = 'com.mysql.jdbc.Driver',
>     'connector.username' = '****',
>     'connector.password' = '*****',
>     'connector.lookup.cache.max-rows' = '5000',
>     'connector.lookup.cache.ttl' = '10min'
>     )
>     """
>     st_env.sql_update(source_ddl)
>
>
> def register_rides_sink(st_env):
>     sink_ddl = \
>     """
>     CREATE TABLE flink_result (
>     id int,
>     type varchar,
>     rtime bigint,
>     primary key(id)
>     ) WITH (
>     'connector.type' = 'kafka',
>     'connector.topic' = 'tp4',
>     'connector.startup-mode' = 'latest-offset',
>     'connector.properties.bootstrap.servers' = 'localhost:9092',
>     'connector.properties.zookeeper.connect' = 'localhost:2181',
>     'format.type' = 'json',
>     'connector.version' = 'universal'
>     )
>     """
>     st_env.sql_update(sink_ddl)
>
>
>
>
> if __name__ == '__main__':
>     from_kafka_to_kafka_demo()
>
>
>
>
>
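
One possible rework, sketched only from the DDLs quoted above and not tested: give source1 a processing-time column and enrich the raw rows with a lookup join against dim_mysql before the window aggregation, so that the windowed result stays insert-only and can still go to the append-mode Kafka sink. The proctime column and the availability of "FOR SYSTEM_TIME AS OF" lookup joins on this JDBC table in this setup are assumptions.

# Untested sketch. Assumes source1 additionally declares:  proctime as PROCTIME()
enriched_window_query = """
    select cast(sum(t1.id) as int) as id,
           max(t2.type) as type,
           cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
    from source1 t1
    left join dim_mysql for system_time as of t1.proctime as t2
        on t1.type = t2.id
    group by tumble(t1.time1, interval '4' second)
"""
# Because the lookup happens before the aggregation, the result is one final row
# per window (insert-only), so "insert into flink_result select ..." would no
# longer trip the AppendStreamTableSink check.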
> ------------------ Original Message ------------------
> From: "我自己的邮箱" <1129656513@qq.com>
> Sent: Wednesday, July 15, 2020, 5:30 PM
> To: "user-zh" <user-zh@flink.apache.org>
> Subject: Re: pyflink1.11.0window
>
>
>
> Thank you very much!
>
>
>
>
> ------------------ Original Message ------------------
> From: "user-zh" <acqua.csq@gmail.com>
> Sent: Wednesday, July 15, 2020, 5:23 PM
> To: "user-zh" <user-zh@flink.apache.org>
> Subject: Re: pyflink1.11.0window
>
>
>
> The following example reads JSON data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. You can use its code structure as a reference and adjust the data fields accordingly. This code probably won't run as-is on your machine.
>
> from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
> from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
> from pyflink.table.udf import udf
>
>
> @udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
> def platform_code_to_name(code):
>     return "mobile" if code == 0 else "pc"
>
>
> def log_processing():
>     env = StreamExecutionEnvironment.get_execution_environment()
>     env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
>     env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
>     t_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)
>
>     source_ddl = """
>         CREATE TABLE payment_msg(
>             createTime VARCHAR,
>             rt as TO_TIMESTAMP(createTime),
>             orderId BIGINT,
>             payAmount DOUBLE,
>             payPlatform INT,
>             paySource INT,
>             WATERMARK FOR rt as rt - INTERVAL '2' SECOND
>         ) WITH (
>             'connector.type' = 'kafka',
>             'connector.version' = 'universal',
>             'connector.topic' = 'payment_msg_2',
>             'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
>             'connector.properties.group.id' = 'test_3',
>             'connector.startup-mode' = 'latest-offset',
>             'format.type' = 'json'
>         )
>         """
>     t_env.sql_update(source_ddl)
>
>     es_sink_ddl = """
>         CREATE TABLE es_sink (
>             platform VARCHAR,
>             pay_amount DOUBLE,
>             rowtime TIMESTAMP(3)
>         ) with (
>             'connector.type' = 'elasticsearch',
>             'connector.version' = '7',
>             'connector.hosts' = 'http://localhost:9200',
>             'connector.index' = 'platform_pay_amount_1',
>             'connector.document-type' = 'payment',
>             'update-mode' = 'upsert',
>             'connector.flush-on-checkpoint' = 'true',
>             'connector.key-delimiter' = '$',
>             'connector.key-null-literal' = 'n/a',
>             'connector.bulk-flush.max-size' = '42mb',
>             'connector.bulk-flush.max-actions' = '32',
>             'connector.bulk-flush.interval' = '1000',
>             'connector.bulk-flush.backoff.delay' = '1000',
>             'format.type' = 'json'
>         )
>         """
>     t_env.sql_update(es_sink_ddl)
>
>     t_env.register_function('platformcodetoname', platform_code_to_name)
>
>     query = """
>     select platformcodetoname(payPlatform) as platform, sum(payAmount) as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
>     from payment_msg
>     group by tumble(rt, interval '5' seconds), payPlatform
>     """
>
>     count_result = t_env.sql_query(query)
>
>     t_env.create_temporary_view('windowed_values', count_result)
>
>     query2 = """
>     select platform, last_value(pay_amount), rowtime from windowed_values group by platform, rowtime
>     """
>
>     final_result = t_env.sql_query(query2)
>
>     final_result.execute_insert(table_path='es_sink')
>
>
> if __name__ == '__main__':
>     log_processing()
>
>
> 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020 at 4:40 PM:
>
> > Hi Shuiqiang,
> >
> > Could you please share a complete code example? I am a beginner, and the official 2-from_kafka_to_kafka.py example has no window. I want a demo that adds a window on top of it; I have tried for a long time without success and keep running into all kinds of problems after adding the window. Any help would be greatly appreciated.
> >
> > A plea to everyone who sees this mail!
> >
> > Thanks!
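
For anyone trying the quoted example, the payment_msg source expects one JSON object per Kafka message, shaped like the DDL above. The values below are made up purely for illustration; createTime has to be in a format TO_TIMESTAMP can parse (by default 'yyyy-MM-dd HH:mm:ss'):

import json

# Hypothetical record matching the payment_msg schema from the example.
sample = {
    "createTime": "2020-07-20 16:23:00",  # parsed by TO_TIMESTAMP(createTime)
    "orderId": 1001,
    "payAmount": 49.5,
    "payPlatform": 0,                     # 0 maps to "mobile" via platform_code_to_name
    "paySource": 1
}
print(json.dumps(sample))  # produce this line to the 'payment_msg_2' topic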
> &gt; &amp;gt; &amp;amp;gt; <
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 1129656513@qq.com
> &amp;amp;amp;amp;gt;;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> 发送时间:&amp;amp;amp;amp;nbsp;2020年7月9日(星期四) 下午5:08
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> 收件人:&amp;amp;amp;amp;nbsp;"godfrey he"<
> &gt; godfreyhe@gmail.com
> &gt; &amp;gt; &amp;amp;amp;amp;gt;;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> 主题:&amp;amp;amp;amp;nbsp;pyflink1.11.0window
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 你好:
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;我在使用pyflink1.11版本时,window开窗仍会报错
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; :
> &gt; org.apache.flink.table.api.ValidationException: A group
> &gt; &amp;gt; window
> &gt; &amp;gt; &amp;amp;gt; expects a
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; time attribute for grouping in
> a stream
> &gt; environment.
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> 请问这个问题没有修复么?或者是我使用的方式不对,如果是使用不对,能提供一个正确的案例么?
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 代码如下
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 谢谢
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def from_kafka_to_kafka_demo():
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; s_env =
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; StreamExecutionEnvironment.get_execution_environment()
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; s_env.set_parallelism(1)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; # use
> &gt; blink table planner
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; st_env =
> &gt; &amp;gt; StreamTableEnvironment.create(s_env)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; #
> &gt; register source and sink
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; register_rides_source(st_env)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; register_rides_sink(st_env)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; st_env.from_path("source1")\
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; .window(Tumble.over("1.secends").on("time1").alias("w")) \
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; .group_by("w") \
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; .select(" id,&amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; &amp;amp;gt; time1 , time1 ")\
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; .insert_into("sink1")
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; st_env.execute("2-from_kafka_to_kafka")
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def
> register_rides_source(st_env):
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; source_ddl = \
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; '''
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; create
> &gt; table source1(
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; id int,
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;time1 timestamp,
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;type string
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;) with (
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'connector.type' = 'kafka',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'update-mode' = 'append',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'connector.topic' = 'tp1',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; 'connector.properties.bootstrap.servers' =
> &gt; &amp;gt; &amp;amp;gt; 'localhost:9092'
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; '''
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; st_env.sql_update(source_ddl)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def
> register_rides_sink(st_env):
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; sink_ddl
> &gt; = \
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; '''
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; create
> &gt; table sink1(
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; id int,
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;time1 timestamp,
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;time2 timestamp
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;) with (
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'connector.type' = 'kafka',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'update-mode' = 'append',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; 'connector.topic' = 'tp3',
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;gt; 'connector.properties.bootstrap.servers' =
> &gt; &amp;gt; &amp;amp;gt; 'localhost:9092'
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; &amp;amp;amp;amp;nbsp;)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp; '''
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; st_env.sql_update(sink_ddl)
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; if __name__ == '__main__':
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
> &gt; from_kafka_to_kafka_demo()
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
> &gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
> &amp;amp;amp;amp;nbsp;
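For what it's worth, "A tumble window expects a size value literal" usually means the string passed to Tumble.over() does not parse as a valid size expression; "2.secends" above would presumably need to be "2.seconds". A minimal sketch of the window definition, assuming the source1/sink1 tables quoted above (grouping by the window plus id so that the selected fields resolve):

# Sketch only: a 2-second event-time tumble window in the 1.11 string DSL.
st_env.from_path("source1") \
    .window(Tumble.over("2.seconds").on("time1").alias("w")) \
    .group_by("w, id") \
    .select("id, w.start, w.end") \
    .insert_into("sink1")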

Re: Re: pyflink1.11.0window

Posted by "chengyanan1008@foxmail.com" <ch...@foxmail.com>.
Hi, it is because your sink only supports insert changes; please check the insert statement.
The key error message is this line:
“AppendStreamTableSink requires that Table has only insert changes.”
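
One possible way around this, sketched below, is to keep every step of the pipeline append-only so that an append-only Kafka sink is acceptable: do the dimension lookup with a processing-time temporal join on the raw source rows before the window aggregation, instead of a regular left join on the aggregated view. This is only a rough sketch, not necessarily equivalent to the original two-step query; it assumes the source1 / dim_mysql / flink_result tables from the script quoted below, plus an extra computed column proctime as PROCTIME() added to source1's DDL.

-- Sketch: lookup (temporal) join before the event-time tumble aggregation.
-- The lookup join keeps the stream append-only, and the group-window
-- aggregation on event time then also emits only inserts, which an
-- append-only Kafka sink accepts.
insert into flink_result
select
  cast(sum(t1.id) as int) as id,
  max(t2.type) as type,
  cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
from source1 t1
join dim_mysql for system_time as of t1.proctime as t2
  on t1.type = t2.id
group by tumble(t1.time1, interval '4' second)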



chengyanan1008@foxmail.com
 
From: 奇怪的不朽琴师
Sent: 2020-07-20 16:23
To: user-zh
Subject: Re: pyflink1.11.0window
Hi:
    I now have a new problem: building on this, I added a join, and writing to Kafka now fails with the error below.
Traceback (most recent call last):
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
&nbsp; &nbsp; return f(*a, **kw)
&nbsp; File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
&nbsp; &nbsp; format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
: org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.Iterator$class.foreach(Iterator.scala:891)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractTraversable.map(Traversable.scala:104)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
&nbsp; &nbsp; &nbsp; &nbsp; at java.lang.reflect.Method.invoke(Method.java:498)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
&nbsp; &nbsp; &nbsp; &nbsp; at java.lang.Thread.run(Thread.java:748)
 
 
 
 
During handling of the above exception, another exception occurred:
 
 
Traceback (most recent call last):
&nbsp; File "tou.py", line 99, in <module&gt;
&nbsp; &nbsp; from_kafka_to_kafka_demo()
&nbsp; File "tou.py", line 33, in from_kafka_to_kafka_demo
&nbsp; &nbsp; st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
&nbsp; &nbsp; self._j_tenv.sqlUpdate(stmt)
&nbsp; File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
&nbsp; &nbsp; answer, self.gateway_client, self.target_id, self.name)
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
&nbsp; &nbsp; raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'
 
 
 
 
 
How should this be implemented? The requirement is roughly: a stream table (which needs grouped aggregation) joined with a dimension table.
 
 
from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, CsvTableSource, CsvTableSink
from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
from pyflink.table.window import Tumble


def from_kafka_to_kafka_demo():

    # use blink table planner
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)

    # register source and sink
    register_rides_source(st_env)
    register_rides_sink(st_env)
    register_mysql_source(st_env)

    query = """
    select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
    from source1 t1
    group by tumble(t1.time1, interval '4' second)
    """
    count_result = st_env.sql_query(query)
    st_env.create_temporary_view('final_result', count_result)

    query2 = """
    select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type = t2.id
    """
    count_result2 = st_env.sql_query(query2)
    st_env.create_temporary_view('final_result2', count_result2)

    st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
    st_env.execute("2-from_kafka_to_kafka")


def register_rides_source(st_env):
    source_ddl = \
    """
    create table source1(
     id int,
     time2 varchar,
     time1 as TO_TIMESTAMP(time2,'yyyyMMddHHmmss'),
     type string,
     WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp1',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(source_ddl)


def register_mysql_source(st_env):
    source_ddl = \
    """
    CREATE TABLE dim_mysql (
    id varchar,
    type varchar
    ) WITH (
    'connector.type' = 'jdbc',
    'connector.url' = 'jdbc:mysql://localhost:3390/test',
    'connector.table' = 'flink_test',
    'connector.driver' = 'com.mysql.jdbc.Driver',
    'connector.username' = '****',
    'connector.password' = '*****',
    'connector.lookup.cache.max-rows' = '5000',
    'connector.lookup.cache.ttl' = '10min'
    )
    """
    st_env.sql_update(source_ddl)


def register_rides_sink(st_env):
    sink_ddl = \
    """
    CREATE TABLE flink_result (
    id int,
    type varchar,
    rtime bigint,
    primary key(id)
    ) WITH (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp4',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(sink_ddl)


if __name__ == '__main__':
    from_kafka_to_kafka_demo()
 
 
 
 
 
------------------ Original message ------------------
From: "我自己的邮箱" <1129656513@qq.com>;
Sent: Wednesday, July 15, 2020, 5:30 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window


     Thank you very much!



------------------ Original message ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Sent: Wednesday, July 15, 2020, 5:23 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window


The example below reads JSON-formatted data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. You can use it as a reference for the code structure and change the data fields accordingly. This code probably will not run on your machine as-is.
 
from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
from pyflink.table.udf import udf


@udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
def platform_code_to_name(code):
    return "mobile" if code == 0 else "pc"


def log_processing():
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    t_env = StreamTableEnvironment.create(stream_execution_environment=env,
                                          environment_settings=env_settings)

    source_ddl = """
        CREATE TABLE payment_msg(
            createTime VARCHAR,
            rt as TO_TIMESTAMP(createTime),
            orderId BIGINT,
            payAmount DOUBLE,
            payPlatform INT,
            paySource INT,
            WATERMARK FOR rt as rt - INTERVAL '2' SECOND
        ) WITH (
            'connector.type' = 'kafka',
            'connector.version' = 'universal',
            'connector.topic' = 'payment_msg_2',
            'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
            'connector.properties.group.id' = 'test_3',
            'connector.startup-mode' = 'latest-offset',
            'format.type' = 'json'
        )
        """
    t_env.sql_update(source_ddl)

    es_sink_ddl = """
        CREATE TABLE es_sink (
            platform VARCHAR,
            pay_amount DOUBLE,
            rowtime TIMESTAMP(3)
        ) with (
            'connector.type' = 'elasticsearch',
            'connector.version' = '7',
            'connector.hosts' = 'http://localhost:9200',
            'connector.index' = 'platform_pay_amount_1',
            'connector.document-type' = 'payment',
            'update-mode' = 'upsert',
            'connector.flush-on-checkpoint' = 'true',
            'connector.key-delimiter' = '$',
            'connector.key-null-literal' = 'n/a',
            'connector.bulk-flush.max-size' = '42mb',
            'connector.bulk-flush.max-actions' = '32',
            'connector.bulk-flush.interval' = '1000',
            'connector.bulk-flush.backoff.delay' = '1000',
            'format.type' = 'json'
        )
    """

    t_env.sql_update(es_sink_ddl)

    t_env.register_function('platformcodetoname', platform_code_to_name)

    query = """
    select platformcodetoname(payPlatform) as platform, sum(payAmount)
    as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT)
    as rowtime
    from payment_msg
    group by tumble(rt, interval '5' seconds), payPlatform
    """

    count_result = t_env.sql_query(query)

    t_env.create_temporary_view('windowed_values', count_result)

    query2 = """
    select platform, last_value(pay_amount), rowtime from
    windowed_values group by platform, rowtime
    """

    final_result = t_env.sql_query(query2)

    final_result.execute_insert(table_path='es_sink')


if __name__ == '__main__':
    log_processing()
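
Note that the es_sink table above is declared with 'update-mode' = 'upsert', which is what allows the second query's last_value re-aggregation (it emits an updated row per key) to be written out; a Kafka table registered in append mode would reject the same pattern with the AppendStreamTableSink error discussed earlier in the thread. As a quick check before wiring a query to an append-only sink, you can print its plan (a minimal sketch, assuming the t_env and query2 defined above and the 1.11-era TableEnvironment.explain API):

# Sketch: inspect the plan to see whether the query produces only inserts.
# If it needs retract/upsert changes, an append-only sink will reject it with
# "AppendStreamTableSink requires that Table has only insert changes."
plan = t_env.explain(t_env.sql_query(query2))
print(plan)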
 
 
奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020, 4:40 PM:

> Shuiqiang, hello:
>     Could I ask you to contribute a complete code example? I am a beginner; the official 2-from_kafka_to_kafka.py example has no window, and I want a demo that adds a window on top of it. I have tried for a long time without success, and after adding the window to that demo I keep running into all kinds of problems. If you can help, I would be very grateful.
>
> A plea to every expert who sees this mail!
>
> Thanks!
>
>
> ------------------ Original message ------------------
> From: "user-zh" <acqua.csq@gmail.com>;
> Sent: Wednesday, July 15, 2020, 11:25 AM
> To: "user-zh" <user-zh@flink.apache.org>;
>
> Subject: Re: pyflink1.11.0window
>
> Here is a SQL example:
> select platformcodetoname(payPlatform) as platform, sum(payAmount) as
> pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as
> rowtime
> from payment_msg group by tumble(rt, interval '5' seconds), payPlatform
> This query aggregates over each 5-second tumble window.
>
> 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wednesday, July 15, 2020, 11:10 AM:
>
> > Shuiqiang, hello:
> >     My goal is to produce a summary aggregation at a fixed interval, for example every two seconds. How should I define the window for this requirement?
> >
> > ------------------ Original message ------------------
> > From: "user-zh" <acqua.csq@gmail.com>;
> > Sent: Wednesday, July 15, 2020, 10:51 AM
> > To: "user-zh" <user-zh@flink.apache.org>;
> >
> > Subject: Re: pyflink1.11.0window
> >
> > Hello 琴师,
> > The exception stack shows org.apache.flink.table.api.ValidationException: A tumble window
> > expects a size value literal.
> > It looks like the code that defines the tumble window is not quite right.
> >
> > Best,
> > Shuiqiang
> >
&gt; &amp;gt; 奇怪的不朽琴师 <1129656513@qq.com&amp;amp;gt; 于2020年7月15日周三 上午10:27写道:
&gt; &amp;gt;
&gt; &amp;gt; &amp;amp;gt; 你好:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;nbsp;我按着你回复的建议改了source但是会报新的错误,请问这个是因为什么?我想调试一个window一直没有成功,请帮帮我,谢谢。
&gt; &amp;gt; &amp;amp;gt; Traceback (most recent call last):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File "tou.py", line 71, in <module&amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; from_kafka_to_kafka_demo()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File "tou.py", line 21, in
&gt; from_kafka_to_kafka_demo
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; .select(" id,&amp;amp;amp;nbsp;
&gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt;
&gt; "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line
&gt; &amp;gt; 907,
&gt; &amp;gt; &amp;amp;gt; in select
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; return
&gt; Table(self._j_table.select(fields),
&gt; &amp;gt; self._t_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File
&gt; &amp;gt; "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py",
&gt; &amp;gt; &amp;amp;gt; line 1286, in __call__
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; answer, self.gateway_client,
&gt; self.target_id,
&gt; &amp;gt; self.name)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File
&gt; &amp;gt; &amp;amp;gt;
&gt; "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py",
&gt; &amp;gt; line
&gt; &amp;gt; &amp;amp;gt; 147, in deco
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; return f(*a, **kw)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; File
&gt; &amp;gt; "/usr/local/lib/python3.7/site-packages/py4j/protocol.py",
&gt; &amp;gt; &amp;amp;gt; line 328, in get_return_value
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; format(target_id, ".", name),
&gt; value)
&gt; &amp;gt; &amp;amp;gt; py4j.protocol.Py4JJavaError: An error occurred while calling
&gt; &amp;gt; o26.select.
&gt; &amp;gt; &amp;amp;gt; : org.apache.flink.table.api.ValidationException: A tumble
&gt; window
&gt; &amp;gt; expects
&gt; &amp;gt; &amp;amp;gt; a size value literal.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt; sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt; java.lang.reflect.Method.invoke(Method.java:498)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; at
&gt; &amp;gt; java.lang.Thread.run(Thread.java:748)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; def register_rides_source(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; source_ddl = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; """
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; create table source1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;type string,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;WATERMARK FOR
&gt; time1 as time1 -
&gt; &amp;gt; INTERVAL '2' SECOND
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'connector.topic' = 'tp1',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; 'localhost:9092',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; 'connector.properties.zookeeper.connect' =
&gt; &amp;gt; 'localhost:2181',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'format.type' = 'json',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'format.derive-schema' =
&gt; 'true',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; 'connector.version' =
&gt; 'universal'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; """
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; st_env.sql_update(source_ddl)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; s_env =
&gt; &amp;gt; &amp;amp;gt; StreamExecutionEnvironment.get_execution_environment()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; s_env.set_parallelism(1)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; st_env =
&gt; StreamTableEnvironment.create(s_env)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; register_rides_source(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; register_rides_sink(st_env)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; st_env.from_path("source1")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; .window(Tumble.over("2.secends").on("time1").alias("w")) \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; .group_by("w") \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; .select(" id,&amp;amp;amp;nbsp;
&gt; &amp;gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; .insert_into("sink1")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;nbsp; &amp;amp;amp;nbsp;
&gt; st_env.execute("2-from_kafka_to_kafka")
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; 代码如上
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; ------------------&amp;amp;amp;nbsp;原始邮件&amp;amp;amp;nbsp;------------------
&gt; &amp;gt; &amp;amp;gt; 发件人:
&gt; &amp;gt;
&gt; &amp;amp;gt;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;
&gt; &amp;gt; "user-zh"
&gt; &amp;gt;
&gt; &amp;amp;gt;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;&amp;amp;nbsp;
&gt; &amp;gt; <
&gt; &amp;gt; &amp;amp;gt; acqua.csq@gmail.com&amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; 发送时间:&amp;amp;amp;nbsp;2020年7月10日(星期五) 上午9:17
&gt; &amp;gt; &amp;amp;gt; 收件人:&amp;amp;amp;nbsp;"user-zh"<user-zh@flink.apache.org
&gt; &amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; 主题:&amp;amp;amp;nbsp;Re: pyflink1.11.0window
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; 琴师你好,
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; 你的source ddl里有指定time1为 time attribute吗?
&gt; &amp;gt; &amp;amp;gt; create table source1(
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; id
&gt; &amp;gt; int,
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; time1 timestamp,
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; type
&gt; &amp;gt; string,
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; WATERMARK FOR time1 as time1 -
&gt; &amp;gt; &amp;amp;gt; INTERVAL '2' SECOND
&gt; &amp;gt; &amp;amp;gt; ) with (...)
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; 奇怪的不朽琴师 <1129656513@qq.com&amp;amp;amp;gt; 于2020年7月10日周五
&gt; 上午8:43写道:
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt;
&gt; ------------------&amp;amp;amp;amp;nbsp;原始邮件&amp;amp;amp;amp;nbsp;------------------
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 发件人:
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;gt;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; "奇怪的不朽琴师"
&gt; &amp;gt; &amp;amp;gt;
&gt; &amp;gt;
&gt; &amp;amp;amp;gt;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;&amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; <
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 1129656513@qq.com&amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 发送时间:&amp;amp;amp;amp;nbsp;2020年7月9日(星期四) 下午5:08
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 收件人:&amp;amp;amp;amp;nbsp;"godfrey he"<
&gt; godfreyhe@gmail.com
&gt; &amp;gt; &amp;amp;amp;amp;gt;;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 主题:&amp;amp;amp;amp;nbsp;pyflink1.11.0window
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 你好:
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;我在使用pyflink1.11版本时,window开窗仍会报错
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; :
&gt; org.apache.flink.table.api.ValidationException: A group
&gt; &amp;gt; window
&gt; &amp;gt; &amp;amp;gt; expects a
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; time attribute for grouping in a stream
&gt; environment.
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 请问这个问题没有修复么?或者是我使用的方式不对,如果是使用不对,能提供一个正确的案例么?
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 代码如下
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; 谢谢
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def from_kafka_to_kafka_demo():
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; s_env =
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; StreamExecutionEnvironment.get_execution_environment()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; s_env.set_parallelism(1)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; # use
&gt; blink table planner
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; st_env =
&gt; &amp;gt; StreamTableEnvironment.create(s_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; #
&gt; register source and sink
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; register_rides_source(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; register_rides_sink(st_env)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; st_env.from_path("source1")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; .window(Tumble.over("1.secends").on("time1").alias("w")) \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .group_by("w") \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .select(" id,&amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; time1 , time1 ")\
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; .insert_into("sink1")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; st_env.execute("2-from_kafka_to_kafka")
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def register_rides_source(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; source_ddl = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; create
&gt; table source1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;type string
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.topic' = 'tp1',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; &amp;amp;gt; 'localhost:9092'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; st_env.sql_update(source_ddl)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; def register_rides_sink(st_env):
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; sink_ddl
&gt; = \
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; create
&gt; table sink1(
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; id int,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;time1 timestamp,
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;time2 timestamp
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;) with (
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.type' = 'kafka',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'update-mode' = 'append',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; 'connector.topic' = 'tp3',
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;gt; 'connector.properties.bootstrap.servers' =
&gt; &amp;gt; &amp;amp;gt; 'localhost:9092'
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; &amp;amp;amp;amp;nbsp;)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp; '''
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; st_env.sql_update(sink_ddl)
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; if __name__ == '__main__':
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;
&gt; from_kafka_to_kafka_demo()
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt;
&gt; &amp;gt; &amp;amp;gt; &amp;amp;amp;gt; &amp;amp;amp;amp;nbsp; &amp;amp;amp;amp;nbsp;

Re: pyflink1.11.0window

Posted by 奇怪的不朽琴师 <11...@qq.com>.
Hi:
    I now have a new problem. On top of this I added a join, and when writing the result to Kafka I get the following error:
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
    return f(*a, **kw)
  File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
: org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
        at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
        at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
        at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
        at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
        at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
        at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.Iterator$class.foreach(Iterator.scala:891)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
        at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
        at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
        at scala.collection.AbstractTraversable.map(Traversable.scala:104)
        at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
        at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
        at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
        at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
        at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
        at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
        at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
        at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
        at java.lang.Thread.run(Thread.java:748)




During handling of the above exception, another exception occurred:


Traceback (most recent call last):
  File "tou.py", line 99, in <module>
    from_kafka_to_kafka_demo()
  File "tou.py", line 33, in from_kafka_to_kafka_demo
    st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
  File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
    self._j_tenv.sqlUpdate(stmt)
  File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
    raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'





How should this be implemented? The requirement is roughly a stream table (which needs a group-by aggregation) joined with a dimension table.
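
A note on what the stack trace above is saying: query2 joins the windowed aggregate against dim_mysql with a regular left join, and a regular stream join may retract and re-emit rows it has already produced, so the planner treats final_result2 as an updating table, while flink_result is backed by the legacy append-only Kafka sink; hence "AppendStreamTableSink requires that Table has only insert changes." Two common ways out are (a) rewriting the dimension lookup as a processing-time temporal (lookup) join, which keeps the result insert-only, or (b) writing to an upsert-capable sink instead of Kafka. The join clause for option (a) would look roughly like the sketch below; this is an assumption-laden sketch rather than the poster's code, and it presumes a processing-time attribute (here called proctime) has been declared on source1, which the DDL further down does not yet do. A fuller end-to-end sketch appears after the script.

# Sketch only: the temporal (lookup) join form that keeps the join result append-only.
# Assumes source1 declares a processing-time column, e.g. `proctime as PROCTIME()`.
lookup_join_sql = """
select t1.id, t2.type, t1.time1
from source1 as t1
left join dim_mysql for system_time as of t1.proctime as t2
  on t1.type = t2.id
"""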


from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, CsvTableSource, CsvTableSink
from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
from pyflink.table.window import Tumble


def from_kafka_to_kafka_demo():
    # use blink table planner
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)

    # register source and sink
    register_rides_source(st_env)
    register_rides_sink(st_env)
    register_mysql_source(st_env)

    query = """
    select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
    from source1 t1
    group by tumble(t1.time1, interval '4' second)
    """
    count_result = st_env.sql_query(query)
    st_env.create_temporary_view('final_result', count_result)

    query2 = """
    select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type = t2.id
    """
    count_result2 = st_env.sql_query(query2)
    st_env.create_temporary_view('final_result2', count_result2)

    st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
    st_env.execute("2-from_kafka_to_kafka")




def register_rides_source(st_env):
    source_ddl = \
    """
    create table source1(
     id int,
     time2 varchar,
     time1 as TO_TIMESTAMP(time2, 'yyyyMMddHHmmss'),
     type string,
     WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp1',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(source_ddl)


def register_mysql_source(st_env):
    source_ddl = \
    """
    CREATE TABLE dim_mysql (
    id varchar,
    type varchar
    ) WITH (
    'connector.type' = 'jdbc',
    'connector.url' = 'jdbc:mysql://localhost:3390/test',
    'connector.table' = 'flink_test',
    'connector.driver' = 'com.mysql.jdbc.Driver',
    'connector.username' = '****',
    'connector.password' = '*****',
    'connector.lookup.cache.max-rows' = '5000',
    'connector.lookup.cache.ttl' = '10min'
    )
    """
    st_env.sql_update(source_ddl)


def register_rides_sink(st_env):
    sink_ddl = \
    """
    CREATE TABLE flink_result (
    id int,
    type varchar,
    rtime bigint,
    primary key(id)
    ) WITH (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp4',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(sink_ddl)




if __name__ == '__main__':
    from_kafka_to_kafka_demo()
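
For reference, a minimal end-to-end sketch of the "enrich first, then aggregate" variant mentioned above. This is not the poster's code: it assumes the same topics, fields, and connector properties as the script above, adds a proctime column to source1, and replaces the regular left join with a processing-time lookup join so that the rows handed to the append-only Kafka sink stay insert-only.

# Hedged sketch under the assumptions stated above; table and field names mirror the
# script in this message, and `proctime` is newly introduced for the lookup join.

source_ddl_with_proctime = """
create table source1(
 id int,
 time2 varchar,
 time1 as TO_TIMESTAMP(time2, 'yyyyMMddHHmmss'),
 type string,
 proctime as PROCTIME(),  -- processing-time attribute required by the lookup join
 WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
) with (
 'connector.type' = 'kafka',
 'connector.topic' = 'tp1',
 'connector.startup-mode' = 'latest-offset',
 'connector.properties.bootstrap.servers' = 'localhost:9092',
 'connector.properties.zookeeper.connect' = 'localhost:2181',
 'format.type' = 'json',
 'connector.version' = 'universal'
)
"""

# Look up the dimension row per input record (each row is emitted exactly once), then
# window the enriched stream; the tumble aggregate of an append-only input is itself
# append-only, so it can be written to the append Kafka sink without the error above.
enrich_then_aggregate = """
insert into flink_result
select cast(sum(t1.id) as int) as id,
       max(t2.type) as type,
       cast(tumble_start(t1.time1, interval '4' second) as bigint) as rtime
from source1 as t1
left join dim_mysql for system_time as of t1.proctime as t2
  on t1.type = t2.id
group by tumble(t1.time1, interval '4' second)
"""

With this shape, register_rides_source would use source_ddl_with_proctime and the two intermediate views are no longer needed; whether max(t2.type) is the right aggregate for the enriched type column is a guess at the original intent and would need to be checked.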





------------------ Original Message ------------------
From: "我自己的邮箱" <1129656513@qq.com>;
Sent: Wednesday, July 15, 2020, 5:30 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window



     Thank you very much!




------------------ Original Message ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Sent: Wednesday, July 15, 2020, 5:23 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window



The following example reads JSON-formatted data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. You can use it as a reference for the code structure and adapt the data fields accordingly. This code probably won't run as-is on your machine.

from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
from pyflink.table.udf import udf


@udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
def platform_code_to_name(code):
    return "mobile" if code == 0 else "pc"


def log_processing():
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    t_env = StreamTableEnvironment.create(stream_execution_environment=env,
                                          environment_settings=env_settings)

    source_ddl = """
        CREATE TABLE payment_msg(
            createTime VARCHAR,
            rt as TO_TIMESTAMP(createTime),
            orderId BIGINT,
            payAmount DOUBLE,
            payPlatform INT,
            paySource INT,
            WATERMARK FOR rt as rt - INTERVAL '2' SECOND
        ) WITH (
            'connector.type' = 'kafka',
            'connector.version' = 'universal',
            'connector.topic' = 'payment_msg_2',
            'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
            'connector.properties.group.id' = 'test_3',
            'connector.startup-mode' = 'latest-offset',
            'format.type' = 'json'
        )
        """
    t_env.sql_update(source_ddl)

    es_sink_ddl = """
        CREATE TABLE es_sink (
            platform VARCHAR,
            pay_amount DOUBLE,
            rowtime TIMESTAMP(3)
        ) with (
            'connector.type' = 'elasticsearch',
            'connector.version' = '7',
            'connector.hosts' = 'http://localhost:9200',
            'connector.index' = 'platform_pay_amount_1',
            'connector.document-type' = 'payment',
            'update-mode' = 'upsert',
            'connector.flush-on-checkpoint' = 'true',
            'connector.key-delimiter' = '$',
            'connector.key-null-literal' = 'n/a',
            'connector.bulk-flush.max-size' = '42mb',
            'connector.bulk-flush.max-actions' = '32',
            'connector.bulk-flush.interval' = '1000',
            'connector.bulk-flush.backoff.delay' = '1000',
            'format.type' = 'json'
        )
    """

    t_env.sql_update(es_sink_ddl)

    t_env.register_function('platformcodetoname', platform_code_to_name)

    query = """
    select platformcodetoname(payPlatform) as platform, sum(payAmount) as pay_amount,
           cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
    from payment_msg
    group by tumble(rt, interval '5' seconds), payPlatform
    """

    count_result = t_env.sql_query(query)

    t_env.create_temporary_view('windowed_values', count_result)

    query2 = """
    select platform, last_value(pay_amount), rowtime from
    windowed_values group by platform, rowtime
    """

    final_result = t_env.sql_query(query2)

    final_result.execute_insert(table_path='es_sink')


if __name__ == '__main__':
    log_processing()
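
One detail worth noting in this quoted example: query2 (last_value grouped by platform and rowtime) produces an updating result, which is why it is written to an Elasticsearch table declared with 'update-mode' = 'upsert' rather than to an append-only Kafka sink. If updating output is acceptable for the question at the top of this message, pointing final_result2 at a similar upsert-capable sink is the other way around the AppendStreamTableSink error. Below is a minimal sketch, not tested against the thread's setup: it assumes an Elasticsearch 7 instance, the index name is a placeholder, and the columns follow the flink_result table above. Note that an upsert sink needs the planner to derive a unique key from the query; grouping the updating result by a key, as the quoted example does, is one way to provide it.

# Hedged sketch of an upsert-capable sink for the updating join result; host and index
# are placeholders, and the column list mirrors flink_result from the question above.
upsert_sink_ddl = """
CREATE TABLE upsert_result (
    id INT,
    type VARCHAR,
    rtime BIGINT
) WITH (
    'connector.type' = 'elasticsearch',
    'connector.version' = '7',
    'connector.hosts' = 'http://localhost:9200',
    'connector.index' = 'flink_result_1',
    'connector.document-type' = 'result',
    'update-mode' = 'upsert',
    'format.type' = 'json'
)
"""
# Possible usage with the st_env from the script above:
# st_env.sql_update(upsert_sink_ddl)
# st_env.sql_update("insert into upsert_result select id, type, rowtime from final_result2")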



Re: pyflink1.11.0window

Posted by 奇怪的不朽琴师 <11...@qq.com>.
     Thank you very much!




------------------&nbsp;原始邮件&nbsp;------------------
发件人:                                                                                                                        "user-zh"                                                                                    <acqua.csq@gmail.com&gt;;
发送时间:&nbsp;2020年7月15日(星期三) 下午5:23
收件人:&nbsp;"user-zh"<user-zh@flink.apache.org&gt;;

主题:&nbsp;Re: pyflink1.11.0window



下面这个例子从kafka读取json格式的数据, 然后做窗口聚合后写入es, 可以参考下代码结构, 修改相应数据字段。 这份代码你本地应该是不能运行的

from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
from pyflink.table.udf import udf


@udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
def platform_code_to_name(code):
&nbsp;&nbsp;&nbsp; return "mobile" if code == 0 else "pc"


def log_processing():
&nbsp;&nbsp;&nbsp; env = StreamExecutionEnvironment.get_execution_environment()
&nbsp;&nbsp;&nbsp; env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
&nbsp;&nbsp;&nbsp; env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
&nbsp;&nbsp;&nbsp; t_env = StreamTableEnvironment.create(stream_execution_environment=env,
environment_settings=env_settings)

&nbsp;&nbsp;&nbsp; source_ddl = """
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; CREATE TABLE payment_msg(
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; createTime VARCHAR,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; rt as TO_TIMESTAMP(createTime),
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; orderId BIGINT,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; payAmount DOUBLE,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; payPlatform INT,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; paySource INT,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; WATERMARK FOR rt as rt - INTERVAL '2' SECOND
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; ) WITH (
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.type' = 'kafka',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.version' = 'universal',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.topic' = 'payment_msg_2',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.properties.group.id' = 'test_3',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.startup-mode' = 'latest-offset',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'format.type' = 'json'
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; )
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; """
&nbsp;&nbsp;&nbsp; t_env.sql_update(source_ddl)

&nbsp;&nbsp;&nbsp; es_sink_ddl = """
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; CREATE TABLE es_sink (
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; platform VARCHAR,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; pay_amount DOUBLE,
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; rowtime TIMESTAMP(3)
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; ) with (
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.type' = 'elasticsearch',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.version' = '7',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.hosts' = 'http://localhost:9200',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.index' = 'platform_pay_amount_1',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.document-type' = 'payment',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'update-mode' = 'upsert',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.flush-on-checkpoint' = 'true',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.key-delimiter' = '$',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.key-null-literal' = 'n/a',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.bulk-flush.max-size' = '42mb',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.bulk-flush.max-actions' = '32',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.bulk-flush.interval' = '1000',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'connector.bulk-flush.backoff.delay' = '1000',
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; 'format.type' = 'json'
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; )
&nbsp;&nbsp;&nbsp; """

&nbsp;&nbsp;&nbsp; t_env.sql_update(es_sink_ddl)

&nbsp;&nbsp;&nbsp; t_env.register_function('platformcodetoname', platform_code_to_name)

&nbsp;&nbsp;&nbsp; query = """
&nbsp;&nbsp;&nbsp; select platformcodetoname(payPlatform) as platform, sum(payAmount)
as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT)
as rowtime
&nbsp;&nbsp;&nbsp; from payment_msg
&nbsp;&nbsp;&nbsp; group by tumble(rt, interval '5' seconds), payPlatform
&nbsp;&nbsp;&nbsp; """

&nbsp;&nbsp;&nbsp; count_result = t_env.sql_query(query)

&nbsp;&nbsp;&nbsp; t_env.create_temporary_view('windowed_values', count_result)

&nbsp;&nbsp;&nbsp; query2 = """
&nbsp;&nbsp;&nbsp; select platform, last_value(pay_amount), rowtime from
windowed_values group by platform, rowtime
&nbsp;&nbsp;&nbsp; """

&nbsp;&nbsp;&nbsp; final_result = t_env.sql_query(query2)

&nbsp;&nbsp;&nbsp; final_result.execute_insert(table_path='es_sink')


if __name__ == '__main__':
&nbsp;&nbsp;&nbsp; log_processing()


Re: pyflink1.11.0window

Posted by Shuiqiang Chen <ac...@gmail.com>.
The example below reads JSON-formatted data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. Use it as a reference for the code structure and adapt the field names to your data. Note that this code will probably not run as-is on your machine.

from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
from pyflink.table.udf import udf


# Scalar UDF mapping the integer platform code to a readable name.
@udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
def platform_code_to_name(code):
    return "mobile" if code == 0 else "pc"


def log_processing():
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    t_env = StreamTableEnvironment.create(
        stream_execution_environment=env, environment_settings=env_settings)

    # Kafka source; rt is an event-time attribute derived from createTime,
    # with a 2-second watermark.
    source_ddl = """
            CREATE TABLE payment_msg(
                createTime VARCHAR,
                rt as TO_TIMESTAMP(createTime),
                orderId BIGINT,
                payAmount DOUBLE,
                payPlatform INT,
                paySource INT,
                WATERMARK FOR rt as rt - INTERVAL '2' SECOND
            ) WITH (
              'connector.type' = 'kafka',
              'connector.version' = 'universal',
              'connector.topic' = 'payment_msg_2',
              'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
              'connector.properties.group.id' = 'test_3',
              'connector.startup-mode' = 'latest-offset',
              'format.type' = 'json'
            )
            """
    t_env.sql_update(source_ddl)

    # Elasticsearch upsert sink for the windowed aggregates.
    es_sink_ddl = """
            CREATE TABLE es_sink (
            platform VARCHAR,
            pay_amount DOUBLE,
            rowtime TIMESTAMP(3)
            ) with (
                'connector.type' = 'elasticsearch',
                'connector.version' = '7',
                'connector.hosts' = 'http://localhost:9200',
                'connector.index' = 'platform_pay_amount_1',
                'connector.document-type' = 'payment',
                'update-mode' = 'upsert',
                'connector.flush-on-checkpoint' = 'true',
                'connector.key-delimiter' = '$',
                'connector.key-null-literal' = 'n/a',
                'connector.bulk-flush.max-size' = '42mb',
                'connector.bulk-flush.max-actions' = '32',
                'connector.bulk-flush.interval' = '1000',
                'connector.bulk-flush.backoff.delay' = '1000',
                'format.type' = 'json'
            )
    """

    t_env.sql_update(es_sink_ddl)

    t_env.register_function('platformcodetoname', platform_code_to_name)

    # Aggregate payments per platform over 5-second tumbling windows.
    query = """
    select platformcodetoname(payPlatform) as platform,
           sum(payAmount) as pay_amount,
           cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
    from payment_msg
    group by tumble(rt, interval '5' seconds), payPlatform
    """

    count_result = t_env.sql_query(query)

    t_env.create_temporary_view('windowed_values', count_result)

    # Keep the latest aggregate for each (platform, rowtime) pair.
    query2 = """
    select platform, last_value(pay_amount), rowtime
    from windowed_values
    group by platform, rowtime
    """

    final_result = t_env.sql_query(query2)

    final_result.execute_insert(table_path='es_sink')


if __name__ == '__main__':
    log_processing()
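
For comparison with the Table API version discussed earlier in this thread (the code that raised "A tumble window expects a size value literal"), here is a minimal, untested sketch of what a working tumble window over the source1/sink1 tables could look like in PyFlink 1.11. The function name tumble_window_demo is made up for illustration, and the topics, brokers and fields are the placeholders used in the thread. The key points are that the window size must be a proper time literal such as "2.seconds" (the original code had "2.secends"), the event-time column must be TIMESTAMP(3) for the watermark to be valid, and every selected column must be a group key, an aggregate, or a window property.

from pyflink.datastream import StreamExecutionEnvironment
from pyflink.table import StreamTableEnvironment
from pyflink.table.window import Tumble


def tumble_window_demo():
    s_env = StreamExecutionEnvironment.get_execution_environment()
    s_env.set_parallelism(1)
    st_env = StreamTableEnvironment.create(s_env)

    # Kafka source; time1 is TIMESTAMP(3) so the WATERMARK clause is valid
    # and the column can serve as the event-time attribute of the window.
    source_ddl = """
    create table source1(
        id int,
        time1 timestamp(3),
        type string,
        watermark for time1 as time1 - interval '2' second
    ) with (
        'connector.type' = 'kafka',
        'connector.version' = 'universal',
        'connector.topic' = 'tp1',
        'connector.properties.bootstrap.servers' = 'localhost:9092',
        'update-mode' = 'append',
        'format.type' = 'json',
        'format.derive-schema' = 'true'
    )
    """
    sink_ddl = """
    create table sink1(
        id int,
        time1 timestamp(3),
        time2 timestamp(3)
    ) with (
        'connector.type' = 'kafka',
        'connector.version' = 'universal',
        'connector.topic' = 'tp3',
        'connector.properties.bootstrap.servers' = 'localhost:9092',
        'update-mode' = 'append',
        'format.type' = 'json',
        'format.derive-schema' = 'true'
    )
    """
    st_env.sql_update(source_ddl)
    st_env.sql_update(sink_ddl)

    # The tumble size must be a time literal such as "2.seconds";
    # every selected column must be a group key, an aggregate, or a
    # window property (w.start / w.end / w.rowtime).
    st_env.from_path("source1") \
        .window(Tumble.over("2.seconds").on("time1").alias("w")) \
        .group_by("w, id") \
        .select("id, w.start as time1, w.end as time2") \
        .insert_into("sink1")

    st_env.execute("tumble_window_demo")


if __name__ == '__main__':
    tumble_window_demo()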



Re: pyflink1.11.0window

Posted by 奇怪的不朽琴师 <11...@qq.com>.
Hi Shuiqiang,

     Could I ask you to share a complete code example? I am a beginner. The official 2-from_kafka_to_kafka.py example has no window, and what I need now is a demo that adds a window on top of it. I have tried to write one for a long time without success; after adding the window logic I keep hitting one problem after another, which is quite painful. I would be very grateful for any help.


A plea to everyone who reads this mail!


Thanks!




------------------ Original message ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Date: Wednesday, July 15, 2020, 11:25
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window



Here is a SQL example:
select platformcodetoname(payPlatform) as platform, sum(payAmount) as
pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as
rowtime
from payment_msg group by tumble(rt, interval '5' seconds), payPlatform
This query aggregates over each 5-second tumble window.

奇怪的不朽琴师 <1129656513@qq.com&gt; 于2020年7月15日周三 上午11:10写道:

&gt; Shuiqiang,你好:
&gt; &amp;nbsp; &amp;nbsp;我的目的是每间隔一段时间做一次汇总统计,比如每两秒做一下汇总,请问这个需求我改如何定义window?
&gt;
&gt;
&gt; ------------------&amp;nbsp;原始邮件&amp;nbsp;------------------
&gt; 发件人:
&gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; "user-zh"
&gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <
&gt; acqua.csq@gmail.com&amp;gt;;
&gt; 发送时间:&amp;nbsp;2020年7月15日(星期三) 上午10:51
&gt; 收件人:&amp;nbsp;"user-zh"<user-zh@flink.apache.org&amp;gt;;
&gt;
&gt; 主题:&amp;nbsp;Re: pyflink1.11.0window
&gt;
&gt;
&gt;
&gt; 琴师你好,

Re: pyflink1.11.0window

Posted by Shuiqiang Chen <ac...@gmail.com>.
Here is a SQL example:

select platformcodetoname(payPlatform) as platform,
       sum(payAmount) as pay_amount,
       cast(tumble_start(rt, interval '5' seconds) as BIGINT) as rowtime
from payment_msg
group by tumble(rt, interval '5' seconds), payPlatform

This query computes an aggregation for every 5-second tumble window.
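A minimal PyFlink sketch of how a query like this could be wired up with the 1.11 Table API. It assumes a payment_msg source with an event-time attribute rt, a sink named sink1, and the platformcodetoname UDF have already been registered; those names are placeholders for illustration, not part of the original job.

from pyflink.datastream import StreamExecutionEnvironment
from pyflink.table import StreamTableEnvironment

s_env = StreamExecutionEnvironment.get_execution_environment()
s_env.set_parallelism(1)
st_env = StreamTableEnvironment.create(s_env)

# payment_msg, sink1 and the platformcodetoname UDF are assumed to be
# registered here (via DDL / register_function) before the query runs.

st_env.sql_query("""
    SELECT platformcodetoname(payPlatform) AS platform,
           SUM(payAmount) AS pay_amount,
           CAST(TUMBLE_START(rt, INTERVAL '5' SECOND) AS BIGINT) AS rowtime
    FROM payment_msg
    GROUP BY TUMBLE(rt, INTERVAL '5' SECOND), payPlatform
""").insert_into("sink1")

st_env.execute("tumble_window_sql_demo")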


Re: pyflink1.11.0window

Posted by 奇怪的不朽琴师 <11...@qq.com>.
Hi Shuiqiang,

My goal is to produce an aggregated summary at a fixed interval, for example once every two seconds. How should I define the window for this requirement?



Re: pyflink1.11.0window

Posted by Shuiqiang Chen <ac...@gmail.com>.
Hi 琴师,

The exception org.apache.flink.table.api.ValidationException: A tumble window
expects a size value literal
indicates that the code defining the tumble window is not quite right: the
window size passed to the tumble definition has to be a valid size literal.

Best,
Shuiqiang
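In the job quoted earlier in this thread the window is declared as
Tumble.over("2.secends").on("time1").alias("w"); "secends" is a misspelling of
"seconds", so the size does not resolve to a time-interval literal, which is
most likely what triggers this error. Below is a corrected sketch, assuming the
source1/sink1 tables from the earlier messages and that time1 is declared as
the event-time attribute (WATERMARK FOR time1 ...); it also groups by id so
that selecting the non-aggregated id column is legal, and aliases the window
start/end to match the sink1 schema (id, time1, time2). This is an
illustration, not code taken from the reply.

from pyflink.table.window import Tumble

# Corrected tumbling-window definition: "2.seconds" is a valid size literal.
st_env.from_path("source1") \
    .window(Tumble.over("2.seconds").on("time1").alias("w")) \
    .group_by("w, id") \
    .select("id, w.start as time1, w.end as time2") \
    .insert_into("sink1")

st_env.execute("2-from_kafka_to_kafka")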
