Posted to user-zh@flink.apache.org by "chengyanan1008@foxmail.com" <ch...@foxmail.com> on 2020/07/21 02:12:40 UTC

Re: Re: pyflink1.11.0window

Hi, your sink only supports insert changes, so please check your INSERT statement.
The key line in the error is this one:
"AppendStreamTableSink requires that Table has only insert changes."
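
One common way to keep such a pipeline append-only, so that an AppendStreamTableSink
such as the Kafka sink can accept it, is to enrich the rows with the dimension table
through a processing-time lookup join before the window aggregation, instead of a
regular left join on the aggregated result. The sketch below is only an illustration
against the table names used further down in this thread; it assumes that source1
additionally declares a column "proctime as PROCTIME()" and that dim_mysql can be used
as a lookup source (its connector.lookup.* options suggest it can). Because the lookup
happens per input row rather than on the per-window max(type), the semantics differ
slightly from the original two-step query.

# Hedged sketch, not code from this thread: a lookup join before the window
# aggregation keeps the result insert-only, so the append-only Kafka sink can accept it.
# Assumes source1 has an extra column:  proctime as PROCTIME()
lookup_join_insert = """
insert into flink_result
select
  cast(sum(s.id) as int) as id,
  max(d.type) as type,
  cast(tumble_start(s.time1, interval '4' second) as bigint) as rowtime
from source1 as s
left join dim_mysql for system_time as of s.proctime as d
  on s.type = d.id
group by tumble(s.time1, interval '4' second)
"""
st_env.sql_update(lookup_join_insert)

Alternatively, if the update stream itself is what you need, write it to a sink that
supports upserts (for example the Elasticsearch sink used in the example further down)
instead of the append-only Kafka sink.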



chengyanan1008@foxmail.com
 
From: 奇怪的不朽琴师
Sent: 2020-07-20 16:23
To: user-zh
Subject: Re: pyflink1.11.0window
Hi:
    I now have a new problem: I added a join on top of this, and when writing the result to Kafka I get the error below.
Traceback (most recent call last):
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
&nbsp; &nbsp; return f(*a, **kw)
&nbsp; File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
&nbsp; &nbsp; format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling o5.sqlUpdate.
: org.apache.flink.table.api.TableException: AppendStreamTableSink requires that Table has only insert changes.
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:123)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.scala:48)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:58)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecSink.translateToPlan(StreamExecSink.scala:48)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:60)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner$$anonfun$translateToPlan$1.apply(StreamPlanner.scala:59)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.Iterator$class.foreach(Iterator.scala:891)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
&nbsp; &nbsp; &nbsp; &nbsp; at scala.collection.AbstractTraversable.map(Traversable.scala:104)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:59)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:153)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:685)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.table.api.internal.TableEnvironmentImpl.sqlUpdate(TableEnvironmentImpl.java:495)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
&nbsp; &nbsp; &nbsp; &nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
&nbsp; &nbsp; &nbsp; &nbsp; at java.lang.reflect.Method.invoke(Method.java:498)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
&nbsp; &nbsp; &nbsp; &nbsp; at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
&nbsp; &nbsp; &nbsp; &nbsp; at java.lang.Thread.run(Thread.java:748)
 
 
 
 
During handling of the above exception, another exception occurred:
 
 
Traceback (most recent call last):
&nbsp; File "tou.py", line 99, in <module&gt;
&nbsp; &nbsp; from_kafka_to_kafka_demo()
&nbsp; File "tou.py", line 33, in from_kafka_to_kafka_demo
&nbsp; &nbsp; st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/table/table_environment.py", line 547, in sql_update
&nbsp; &nbsp; self._j_tenv.sqlUpdate(stmt)
&nbsp; File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
&nbsp; &nbsp; answer, self.gateway_client, self.target_id, self.name)
&nbsp; File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 154, in deco
&nbsp; &nbsp; raise exception_mapping[exception](s.split(': ', 1)[1], stack_trace)
pyflink.util.exceptions.TableException: 'AppendStreamTableSink requires that Table has only insert changes.'
 
 
 
 
 
How should this be implemented? The requirement is, roughly, a stream table (which needs a group-by aggregation) joined with a dimension table.
 
 
from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings, CsvTableSource, CsvTableSink
from pyflink.table.descriptors import Schema, Kafka, Json, Rowtime
from pyflink.table.window import Tumble


def from_kafka_to_kafka_demo():

    # use blink table planner
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    st_env = StreamTableEnvironment.create(stream_execution_environment=env, environment_settings=env_settings)

    # register source and sink
    register_rides_source(st_env)
    register_rides_sink(st_env)
    register_mysql_source(st_env)

    query = """
    select cast(sum(t1.id) as int) as id, max(t1.type) as type, cast(tumble_start(t1.time1, interval '4' second) as bigint) as rowtime
    from source1 t1
    group by tumble(t1.time1, interval '4' second)
    """
    count_result = st_env.sql_query(query)
    st_env.create_temporary_view('final_result', count_result)

    query2 = """
    select t1.id, t2.type, t1.rowtime from final_result t1 left join dim_mysql t2 on t1.type = t2.id
    """
    count_result2 = st_env.sql_query(query2)
    st_env.create_temporary_view('final_result2', count_result2)

    st_env.sql_update("insert into flink_result select id,type,rowtime from final_result2")
    st_env.execute("2-from_kafka_to_kafka")
 
 
 
 
def register_rides_source(st_env):
    source_ddl = \
    """
    create table source1(
     id int,
     time2 varchar,
     time1 as TO_TIMESTAMP(time2, 'yyyyMMddHHmmss'),
     type string,
     WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
    ) with (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp1',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(source_ddl)
 
 
def register_mysql_source(st_env):
    source_ddl = \
    """
    CREATE TABLE dim_mysql (
    id varchar,
    type varchar
    ) WITH (
    'connector.type' = 'jdbc',
    'connector.url' = 'jdbc:mysql://localhost:3390/test',
    'connector.table' = 'flink_test',
    'connector.driver' = 'com.mysql.jdbc.Driver',
    'connector.username' = '****',
    'connector.password' = '*****',
    'connector.lookup.cache.max-rows' = '5000',
    'connector.lookup.cache.ttl' = '10min'
    )
    """
    st_env.sql_update(source_ddl)
 
 
def register_rides_sink(st_env):
    sink_ddl = \
    """
    CREATE TABLE flink_result (
    id int,
    type varchar,
    rtime bigint,
    primary key(id)
    ) WITH (
    'connector.type' = 'kafka',
    'connector.topic' = 'tp4',
    'connector.startup-mode' = 'latest-offset',
    'connector.properties.bootstrap.servers' = 'localhost:9092',
    'connector.properties.zookeeper.connect' = 'localhost:2181',
    'format.type' = 'json',
    'connector.version' = 'universal'
    )
    """
    st_env.sql_update(sink_ddl)
 
 
 
 
if __name__ == '__main__':
    from_kafka_to_kafka_demo()
 
 
 
 
 
------------------ Original Mail ------------------
From: "我自己的邮箱" <1129656513@qq.com>;
Sent: Wednesday, July 15, 2020 5:30 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window
 
 
 
Thank you very much!
 
 
 
 
------------------ Original Mail ------------------
From: "user-zh" <acqua.csq@gmail.com>;
Sent: Wednesday, July 15, 2020 5:23 PM
To: "user-zh" <user-zh@flink.apache.org>;

Subject: Re: pyflink1.11.0window
 
 
 
The example below reads JSON data from Kafka, performs a windowed aggregation, and writes the result to Elasticsearch. You can use the code structure as a reference and adapt the data fields. This code will probably not run as-is on your machine.
 
from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic
from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings
from pyflink.table.udf import udf
 
 
@udf(input_types=[DataTypes.INT()], result_type=DataTypes.STRING())
def platform_code_to_name(code):
    return "mobile" if code == 0 else "pc"


def log_processing():
    env = StreamExecutionEnvironment.get_execution_environment()
    env.set_stream_time_characteristic(TimeCharacteristic.EventTime)
    env_settings = EnvironmentSettings.Builder().use_blink_planner().build()
    t_env = StreamTableEnvironment.create(stream_execution_environment=env,
                                          environment_settings=env_settings)

    source_ddl = """
        CREATE TABLE payment_msg(
            createTime VARCHAR,
            rt as TO_TIMESTAMP(createTime),
            orderId BIGINT,
            payAmount DOUBLE,
            payPlatform INT,
            paySource INT,
            WATERMARK FOR rt as rt - INTERVAL '2' SECOND
        ) WITH (
            'connector.type' = 'kafka',
            'connector.version' = 'universal',
            'connector.topic' = 'payment_msg_2',
            'connector.properties.bootstrap.servers' = '0.0.0.0:9092',
            'connector.properties.group.id' = 'test_3',
            'connector.startup-mode' = 'latest-offset',
            'format.type' = 'json'
        )
        """
    t_env.sql_update(source_ddl)

    es_sink_ddl = """
        CREATE TABLE es_sink (
            platform VARCHAR,
            pay_amount DOUBLE,
            rowtime TIMESTAMP(3)
        ) with (
            'connector.type' = 'elasticsearch',
            'connector.version' = '7',
            'connector.hosts' = 'http://localhost:9200',
            'connector.index' = 'platform_pay_amount_1',
            'connector.document-type' = 'payment',
            'update-mode' = 'upsert',
            'connector.flush-on-checkpoint' = 'true',
            'connector.key-delimiter' = '$',
            'connector.key-null-literal' = 'n/a',
            'connector.bulk-flush.max-size' = '42mb',
            'connector.bulk-flush.max-actions' = '32',
            'connector.bulk-flush.interval' = '1000',
            'connector.bulk-flush.backoff.delay' = '1000',
            'format.type' = 'json'
        )
        """

    t_env.sql_update(es_sink_ddl)

    t_env.register_function('platformcodetoname', platform_code_to_name)

    query = """
    select platformcodetoname(payPlatform) as platform, sum(payAmount)
    as pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT)
    as rowtime
    from payment_msg
    group by tumble(rt, interval '5' seconds), payPlatform
    """

    count_result = t_env.sql_query(query)

    t_env.create_temporary_view('windowed_values', count_result)

    query2 = """
    select platform, last_value(pay_amount), rowtime from
    windowed_values group by platform, rowtime
    """

    final_result = t_env.sql_query(query2)

    final_result.execute_insert(table_path='es_sink')


if __name__ == '__main__':
    log_processing()
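
For reference, the same 5-second tumbling aggregation can also be written with the
PyFlink 1.11 Table API, which is what the later messages in this thread attempt. A
minimal sketch, assuming the payment_msg table and the registered platformcodetoname
function from the example above; note that the window must be defined on the rt time
attribute, the size literal is spelled like "5.seconds", and the group_by must include
the window alias:

# Table API sketch of the SQL tumble query above (string expression DSL).
from pyflink.table.window import Tumble

windowed = t_env.from_path("payment_msg") \
    .window(Tumble.over("5.seconds").on("rt").alias("w")) \
    .group_by("w, payPlatform") \
    .select("platformcodetoname(payPlatform) as platform, "
            "payAmount.sum as pay_amount, "
            "w.start as rowtime")
# w.start is the window start as TIMESTAMP(3); the SQL version casts it to BIGINT.
t_env.create_temporary_view("windowed_values_api", windowed)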
 
 
奇怪的不朽琴师 <1129656513@qq.com> wrote on Wed, Jul 15, 2020 at 4:40 PM:

> Shuiqiang, hello:
>
> Hi, could I ask you to share a complete code example? I am a beginner; the
> official 2-from_kafka_to_kafka.py example has no window, and I would like a
> demo that adds a window on top of it. I have tried to write one for a long
> time without success; every time I add a window to this demo I run into one
> problem after another, which is quite painful. Any help would be greatly
> appreciated.
>
> A plea to every expert who sees this mail!
>
> Thanks!
>
>
> ------------------ Original Mail ------------------
> From: "user-zh" <acqua.csq@gmail.com>;
> Sent: Wednesday, July 15, 2020 11:25 AM
> To: "user-zh" <user-zh@flink.apache.org>;
>
> Subject: Re: pyflink1.11.0window
>
>
>
> Here is a SQL example:
> select platformcodetoname(payPlatform) as platform, sum(payAmount) as
> pay_amount, cast(tumble_start(rt, interval '5' seconds) as BIGINT) as
> rowtime
> from payment_msg group by tumble(rt, interval '5' seconds), payPlatform
> This query aggregates over each 5-second tumbling window.
> 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wed, Jul 15, 2020 at 11:10 AM:
>
> > Shuiqiang, hello:
> >
> > My goal is to compute a summary at a fixed interval, for example an
> > aggregation every two seconds. How should I define the window for this
> > requirement?
> >
> >
> > ------------------ Original Mail ------------------
> > From: "user-zh" <acqua.csq@gmail.com>;
> > Sent: Wednesday, July 15, 2020 10:51 AM
> > To: "user-zh" <user-zh@flink.apache.org>;
> >
> > Subject: Re: pyflink1.11.0window
> >
> >
> >
> > Hi 琴师,
> > The exception stack org.apache.flink.table.api.ValidationException: A tumble window
> > expects a size value literal
> > suggests that the code defining the tumble window is not quite right.
> >
> > Best,
> > Shuiqiang
> >
> > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Wed, Jul 15, 2020 at 10:27 AM:
> >
> > > Hello:
> > >
> > > I changed the source following the suggestion in your reply, but now I get
> > > a new error. What is causing it? I have been trying to get a window working
> > > without success; please help me, thanks.
> > > Traceback (most recent call last):
> > >   File "tou.py", line 71, in <module>
> > >     from_kafka_to_kafka_demo()
> > >   File "tou.py", line 21, in from_kafka_to_kafka_demo
> > >     .select(" id,  time1 , time1 ")\
> > >   File "/usr/local/lib/python3.7/site-packages/pyflink/table/table.py", line 907, in select
> > >     return Table(self._j_table.select(fields), self._t_env)
> > >   File "/usr/local/lib/python3.7/site-packages/py4j/java_gateway.py", line 1286, in __call__
> > >     answer, self.gateway_client, self.target_id, self.name)
> > >   File "/usr/local/lib/python3.7/site-packages/pyflink/util/exceptions.py", line 147, in deco
> > >     return f(*a, **kw)
> > >   File "/usr/local/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
> > >     format(target_id, ".", name), value)
> > > py4j.protocol.Py4JJavaError: An error occurred while calling o26.select.
> > > : org.apache.flink.table.api.ValidationException: A tumble window expects a size value literal.
> > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.getAsValueLiteral(AggregateOperationFactory.java:384)
> > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.validateAndCreateTumbleWindow(AggregateOperationFactory.java:302)
> > >         at org.apache.flink.table.operations.utils.AggregateOperationFactory.createResolvedWindow(AggregateOperationFactory.java:236)
> > >         at org.apache.flink.table.operations.utils.OperationTreeBuilder.windowAggregate(OperationTreeBuilder.java:250)
> > >         at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:794)
> > >         at org.apache.flink.table.api.internal.TableImpl$WindowGroupedTableImpl.select(TableImpl.java:781)
> > >         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > >         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> > >         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> > >         at java.lang.reflect.Method.invoke(Method.java:498)
> > >         at org.apache.flink.api.python.shaded.py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
> > >         at org.apache.flink.api.python.shaded.py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
> > >         at org.apache.flink.api.python.shaded.py4j.Gateway.invoke(Gateway.java:282)
> > >         at org.apache.flink.api.python.shaded.py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
> > >         at org.apache.flink.api.python.shaded.py4j.commands.CallCommand.execute(CallCommand.java:79)
> > >         at org.apache.flink.api.python.shaded.py4j.GatewayConnection.run(GatewayConnection.java:238)
> > >         at java.lang.Thread.run(Thread.java:748)
> > >
> > > def register_rides_source(st_env):
> > >     source_ddl = \
> > >     """
> > >     create table source1(
> > >      id int,
> > >      time1 timestamp,
> > >      type string,
> > >      WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
> > >     ) with (
> > >     'connector.type' = 'kafka',
> > >     'update-mode' = 'append',
> > >     'connector.topic' = 'tp1',
> > >     'connector.properties.bootstrap.servers' = 'localhost:9092',
> > >     'connector.properties.zookeeper.connect' = 'localhost:2181',
> > >     'format.type' = 'json',
> > >     'format.derive-schema' = 'true',
> > >     'connector.version' = 'universal'
> > >     )
> > >     """
> > >     st_env.sql_update(source_ddl)
> > >
> > >     s_env = StreamExecutionEnvironment.get_execution_environment()
> > >     s_env.set_parallelism(1)
> > >
> > >     st_env = StreamTableEnvironment.create(s_env)
> > >
> > >     register_rides_source(st_env)
> > >     register_rides_sink(st_env)
> > >
> > >     st_env.from_path("source1")\
> > >         .window(Tumble.over("2.secends").on("time1").alias("w")) \
> > >         .group_by("w") \
> > >         .select(" id,  time1 , time1 ")\
> > >         .insert_into("sink1")
> > >
> > >     st_env.execute("2-from_kafka_to_kafka")
> > >
> > > The code is as above.
> > >
> > >
> > > ------------------ Original Mail ------------------
> > > From: "user-zh" <acqua.csq@gmail.com>;
> > > Sent: Friday, July 10, 2020 9:17 AM
> > > To: "user-zh" <user-zh@flink.apache.org>;
> > >
> > > Subject: Re: pyflink1.11.0window
> > >
> > >
> > >
> > > Hi 琴师,
> > >
> > > Does your source DDL declare time1 as a time attribute?
> > > create table source1(
> > >        id int,
> > >        time1 timestamp,
> > >        type string,
> > >        WATERMARK FOR time1 as time1 - INTERVAL '2' SECOND
> > > ) with (...)
> > > 奇怪的不朽琴师 <1129656513@qq.com> wrote on Fri, Jul 10, 2020 at 8:43 AM:
> > >
> > > > ------------------ Original Mail ------------------
> > > > From: "奇怪的不朽琴师" <1129656513@qq.com>;
> > > > Sent: Thursday, July 9, 2020 5:08 PM
> > > > To: "godfrey he" <godfreyhe@gmail.com>;
> > > >
> > > > Subject: pyflink1.11.0window
> > > >
> > > >
> > > >
> > > > Hello:
> > > > When I use pyflink 1.11, opening a window still fails with:
> > > > org.apache.flink.table.api.ValidationException: A group window expects a
> > > > time attribute for grouping in a stream environment.
> > > >
> > > > Has this problem not been fixed yet, or am I using it the wrong way? If I
> > > > am using it incorrectly, could you provide a correct example?
> > > > The code is below.
> > > > Thanks
> > > >
> > > > def from_kafka_to_kafka_demo():
> > > >     s_env = StreamExecutionEnvironment.get_execution_environment()
> > > >     s_env.set_parallelism(1)
> > > >
> > > >     # use blink table planner
> > > >     st_env = StreamTableEnvironment.create(s_env)
> > > >
> > > >     # register source and sink
> > > >     register_rides_source(st_env)
> > > >     register_rides_sink(st_env)
> > > >
> > > >     st_env.from_path("source1")\
> > > >         .window(Tumble.over("1.secends").on("time1").alias("w")) \
> > > >         .group_by("w") \
> > > >         .select(" id,  time1 , time1 ")\
> > > >         .insert_into("sink1")
> > > >
> > > >     st_env.execute("2-from_kafka_to_kafka")
> > > >
> > > >
> > > > def register_rides_source(st_env):
> > > >     source_ddl = \
> > > >     '''
> > > >     create table source1(
> > > >      id int,
> > > >      time1 timestamp,
> > > >      type string
> > > >     ) with (
> > > >     'connector.type' = 'kafka',
> > > >     'update-mode' = 'append',
> > > >     'connector.topic' = 'tp1',
> > > >     'connector.properties.bootstrap.servers' = 'localhost:9092'
> > > >     )
> > > >     '''
> > > >     st_env.sql_update(source_ddl)
> > > >
> > > >
> > > > def register_rides_sink(st_env):
> > > >     sink_ddl = \
> > > >     '''
> > > >     create table sink1(
> > > >      id int,
> > > >      time1 timestamp,
> > > >      time2 timestamp
> > > >     ) with (
> > > >     'connector.type' = 'kafka',
> > > >     'update-mode' = 'append',
> > > >     'connector.topic' = 'tp3',
> > > >     'connector.properties.bootstrap.servers' = 'localhost:9092'
> > > >     )
> > > >     '''
> > > >     st_env.sql_update(sink_ddl)
> > > >
> > > >
> > > > if __name__ == '__main__':
> > > >     from_kafka_to_kafka_demo()
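
A minimal sketch of how the windowed pipeline in the quoted code above could be
written so that both exceptions seen in this thread go away, assuming the source1 and
sink1 DDLs from the quoted messages, with source1 additionally declaring
"WATERMARK FOR time1 AS time1 - INTERVAL '2' SECOND" so that time1 is an event-time
attribute:

# Corrected window definition (illustration only, not code from the thread):
# - the size literal is spelled "2.seconds" (not "2.secends"),
# - non-grouped fields are aggregated, and window start/end are taken from w.
from pyflink.table.window import Tumble

st_env.from_path("source1") \
    .window(Tumble.over("2.seconds").on("time1").alias("w")) \
    .group_by("w") \
    .select("id.sum as id, w.start as time1, w.end as time2") \
    .insert_into("sink1")
st_env.execute("2-from_kafka_to_kafka")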