You are viewing a plain text version of this content. The canonical HTML version is available from the mailing list archive (link removed in this plain-text rendering).
Posted to commits@flink.apache.org by di...@apache.org on 2021/05/08 13:06:16 UTC

[flink] branch release-1.13 updated: [hotfix][docs] Correct the examples in Python DataStream API

This is an automated email from the ASF dual-hosted git repository.

dianfu pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.13 by this push:
     new 4ba54dd  [hotfix][docs] Correct the examples in Python DataStream API
4ba54dd is described below

commit 4ba54dd11c88411228e7330f3894ae3f481df26f
Author: Dian Fu <di...@apache.org>
AuthorDate: Sat May 8 21:05:20 2021 +0800

    [hotfix][docs] Correct the examples in Python DataStream API
---
 docs/content.zh/docs/dev/python/datastream/data_types.md | 6 +++---
 docs/content.zh/docs/dev/python/datastream_tutorial.md   | 6 +++---
 docs/content/docs/dev/python/datastream/data_types.md    | 4 ++--
 docs/content/docs/dev/python/datastream_tutorial.md      | 6 +++---
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/docs/content.zh/docs/dev/python/datastream/data_types.md b/docs/content.zh/docs/dev/python/datastream/data_types.md
index f12ead5..54109e6 100644
--- a/docs/content.zh/docs/dev/python/datastream/data_types.md
+++ b/docs/content.zh/docs/dev/python/datastream/data_types.md
@@ -63,7 +63,7 @@ Since Java operators or functions can not identify Python data, types need to be
 For example, types need to be provided if you want to output data using the StreamingFileSink which is implemented in Java.
 
 ```python
-from pyflink.common.serialization import SimpleStringEncoder
+from pyflink.common.serialization import Encoder
 from pyflink.common.typeinfo import Types
 from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors import StreamingFileSink
@@ -73,10 +73,10 @@ def streaming_file_sink():
     env = StreamExecutionEnvironment.get_execution_environment()
     env.set_parallelism(1)
     env.from_collection(collection=[(1, 'aaa'), (2, 'bbb')]) \
-        .map(lambda record: (record[0]+1, record[1].upper()),
+        .map(lambda record: (record[0] + 1, record[1].upper()),
              output_type=Types.ROW([Types.INT(), Types.STRING()])) \
         .add_sink(StreamingFileSink
-                  .for_row_format('/tmp/output', SimpleStringEncoder())
+                  .for_row_format('/tmp/output', Encoder.simple_string_encoder())
                   .build())
 
     env.execute()
diff --git a/docs/content.zh/docs/dev/python/datastream_tutorial.md b/docs/content.zh/docs/dev/python/datastream_tutorial.md
index cb6b4c2..d5fb302 100644
--- a/docs/content.zh/docs/dev/python/datastream_tutorial.md
+++ b/docs/content.zh/docs/dev/python/datastream_tutorial.md
@@ -83,7 +83,7 @@ ds = env.from_collection(
 
 ```python
 ds.add_sink(StreamingFileSink
-    .for_row_format('/tmp/output', SimpleStringEncoder())
+    .for_row_format('/tmp/output', Encoder.simple_string_encoder())
     .build())
 ```
 
@@ -96,7 +96,7 @@ env.execute("tutorial_job")
 完整的代码如下:
 
 ```python
-from pyflink.common.serialization import SimpleStringEncoder
+from pyflink.common.serialization import Encoder
 from pyflink.common.typeinfo import Types
 from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors import StreamingFileSink
@@ -109,7 +109,7 @@ def tutorial():
         collection=[(1, 'aaa'), (2, 'bbb')],
         type_info=Types.ROW([Types.INT(), Types.STRING()]))
     ds.add_sink(StreamingFileSink
-                .for_row_format('/tmp/output', SimpleStringEncoder())
+                .for_row_format('/tmp/output', Encoder.simple_string_encoder())
                 .build())
     env.execute("tutorial_job")
 
diff --git a/docs/content/docs/dev/python/datastream/data_types.md b/docs/content/docs/dev/python/datastream/data_types.md
index e58cc45..be0b683 100644
--- a/docs/content/docs/dev/python/datastream/data_types.md
+++ b/docs/content/docs/dev/python/datastream/data_types.md
@@ -63,7 +63,7 @@ Since Java operators or functions can not identify Python data, types need to be
 For example, types need to be provided if you want to output data using the StreamingFileSink which is implemented in Java.
 
 ```python
-from pyflink.common.serialization import SimpleStringEncoder
+from pyflink.common.serialization import Encoder
 from pyflink.common.typeinfo import Types
 from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors import StreamingFileSink
@@ -76,7 +76,7 @@ def streaming_file_sink():
         .map(lambda record: (record[0]+1, record[1].upper()),
              output_type=Types.ROW([Types.INT(), Types.STRING()])) \
         .add_sink(StreamingFileSink
-                  .for_row_format('/tmp/output', SimpleStringEncoder())
+                  .for_row_format('/tmp/output', Encoder.simple_string_encoder())
                   .build())
 
     env.execute()
diff --git a/docs/content/docs/dev/python/datastream_tutorial.md b/docs/content/docs/dev/python/datastream_tutorial.md
index aef9b4d..8fe64d1 100644
--- a/docs/content/docs/dev/python/datastream_tutorial.md
+++ b/docs/content/docs/dev/python/datastream_tutorial.md
@@ -82,7 +82,7 @@ You can now perform transformations on this data stream, or just write the data
 
 ```python
 ds.add_sink(StreamingFileSink
-    .for_row_format('/tmp/output', SimpleStringEncoder())
+    .for_row_format('/tmp/output', Encoder.simple_string_encoder())
     .build())
 ```
 
@@ -95,7 +95,7 @@ env.execute("tutorial_job")
 The complete code so far:
 
 ```python
-from pyflink.common.serialization import SimpleStringEncoder
+from pyflink.common.serialization import Encoder
 from pyflink.common.typeinfo import Types
 from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors import StreamingFileSink
@@ -108,7 +108,7 @@ def tutorial():
         collection=[(1, 'aaa'), (2, 'bbb')],
         type_info=Types.ROW([Types.INT(), Types.STRING()]))
     ds.add_sink(StreamingFileSink
-                .for_row_format('/tmp/output', SimpleStringEncoder())
+                .for_row_format('/tmp/output', Encoder.simple_string_encoder())
                 .build())
     env.execute("tutorial_job")