Posted to commits@flink.apache.org by di...@apache.org on 2020/06/03 08:13:58 UTC

[flink] branch release-1.11 updated: [hotfix][python] Add the version for the APIs introduced in 1.11.0

This is an automated email from the ASF dual-hosted git repository.

dianfu pushed a commit to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.11 by this push:
     new 639a892  [hotfix][python] Add the version for the APIs introduced in 1.11.0
639a892 is described below

commit 639a892f18fcfda877fe2a727d5a2ca4b2418f05
Author: Dian Fu <di...@apache.org>
AuthorDate: Wed Jun 3 11:43:01 2020 +0800

    [hotfix][python] Add the version for the APIs introduced in 1.11.0
---
 flink-python/pyflink/common/completable_future.py  |  2 +
 flink-python/pyflink/common/job_client.py          | 16 +++++
 .../pyflink/common/job_execution_result.py         | 16 +++++
 flink-python/pyflink/common/job_id.py              |  2 +
 flink-python/pyflink/common/job_status.py          |  6 ++
 flink-python/pyflink/metrics/metricbase.py         | 80 ++++++++++++++++++++--
 flink-python/pyflink/ml/api/base.py                | 24 +++++++
 flink-python/pyflink/ml/api/ml_environment.py      | 10 +++
 .../pyflink/ml/api/ml_environment_factory.py       | 12 ++++
 flink-python/pyflink/ml/lib/param/colname.py       |  6 ++
 flink-python/pyflink/table/catalog.py              | 10 ++-
 flink-python/pyflink/table/descriptors.py          | 22 ++++++
 flink-python/pyflink/table/explain_detail.py       |  2 +
 flink-python/pyflink/table/result_kind.py          |  2 +
 flink-python/pyflink/table/statement_set.py        |  9 +++
 flink-python/pyflink/table/table.py                |  8 +++
 flink-python/pyflink/table/table_environment.py    | 10 +++
 flink-python/pyflink/table/table_result.py         | 10 +++
 flink-python/pyflink/table/types.py                |  2 +
 flink-python/pyflink/table/udf.py                  | 17 ++++-
 20 files changed, 256 insertions(+), 10 deletions(-)

diff --git a/flink-python/pyflink/common/completable_future.py b/flink-python/pyflink/common/completable_future.py
index 266ea26..18b9f56 100644
--- a/flink-python/pyflink/common/completable_future.py
+++ b/flink-python/pyflink/common/completable_future.py
@@ -27,6 +27,8 @@ class CompletableFuture(Future):
 
     When two or more threads attempt to set_result, set_exception, or cancel a CompletableFuture,
     only one of them succeeds.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_completable_future, py_class=None):
diff --git a/flink-python/pyflink/common/job_client.py b/flink-python/pyflink/common/job_client.py
index e4a1f39..9bdfa62 100644
--- a/flink-python/pyflink/common/job_client.py
+++ b/flink-python/pyflink/common/job_client.py
@@ -26,6 +26,8 @@ __all__ = ['JobClient']
 class JobClient(object):
     """
     A client that is scoped to a specific job.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_job_client):
@@ -37,6 +39,8 @@ class JobClient(object):
 
         :return: JobID, or null if the job has been executed on a runtime without JobIDs
                  or if the execution failed.
+
+        .. versionadded:: 1.11.0
         """
         return JobID(self._j_job_client.getJobID())
 
@@ -46,6 +50,8 @@ class JobClient(object):
 
         :return: A CompletableFuture containing the JobStatus of the associated job.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(self._j_job_client.getJobStatus(), JobStatus)
 
@@ -55,6 +61,8 @@ class JobClient(object):
 
         :return: A CompletableFuture for canceling the associated job.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(self._j_job_client.cancel())
 
@@ -73,6 +81,8 @@ class JobClient(object):
         :type savepoint_directory: str
         :return: A CompletableFuture containing the path where the savepoint is located.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(
             self._j_job_client.stopWithSavepoint(advance_to_end_of_event_time, savepoint_directory),
@@ -87,6 +97,8 @@ class JobClient(object):
         :type savepoint_directory: str
         :return: A CompletableFuture containing the path where the savepoint is located.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(self._j_job_client.triggerSavepoint(savepoint_directory), str)
 
@@ -99,6 +111,8 @@ class JobClient(object):
         :param class_loader: Class loader used to deserialize the incoming accumulator results.
         :return: A CompletableFuture containing the accumulators of the associated job.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(self._j_job_client.getAccumulators(class_loader), dict)
 
@@ -109,6 +123,8 @@ class JobClient(object):
         :param user_class_loader: Class loader used to deserialize the accumulators of the job.
         :return: A CompletableFuture containing the JobExecutionResult result of the job execution.
         :rtype: pyflink.common.CompletableFuture
+
+        .. versionadded:: 1.11.0
         """
         return CompletableFuture(self._j_job_client.getJobExecutionResult(user_class_loader),
                                  JobExecutionResult)
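
For context, a minimal sketch of how these JobClient methods are typically reached in
PyFlink 1.11: the client is obtained from the TableResult of an asynchronously submitted
statement. The environment setup and the source table "src" are assumptions for
illustration:

    from pyflink.table import EnvironmentSettings, StreamTableEnvironment

    # assumed setup; "src" is a hypothetical table registered elsewhere
    settings = EnvironmentSettings.new_instance().in_streaming_mode().build()
    table_env = StreamTableEnvironment.create(environment_settings=settings)

    table_result = table_env.execute_sql("SELECT * FROM src")
    job_client = table_result.get_job_client()
    if job_client is not None:
        print(job_client.get_job_id())
        # get_job_status() returns a CompletableFuture; result() blocks on it
        status = job_client.get_job_status().result()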
diff --git a/flink-python/pyflink/common/job_execution_result.py b/flink-python/pyflink/common/job_execution_result.py
index 26ea95e..2462237 100644
--- a/flink-python/pyflink/common/job_execution_result.py
+++ b/flink-python/pyflink/common/job_execution_result.py
@@ -24,6 +24,8 @@ class JobExecutionResult(object):
     """
     The result of a job execution. Gives access to the execution time of the job,
     and to all accumulators created by this job.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_job_execution_result):
@@ -35,6 +37,8 @@ class JobExecutionResult(object):
 
         :return: JobID, or null if the job has been executed on a runtime without JobIDs
                  or if the execution failed.
+
+        .. versionadded:: 1.11.0
         """
         return JobID(self._j_job_execution_result.getJobID())
 
@@ -45,6 +49,8 @@ class JobExecutionResult(object):
         .. seealso:: :func:`get_job_execution_result` to retrieve the JobExecutionResult.
 
         :return: ``True`` if this is a JobExecutionResult, ``False`` otherwise.
+
+        .. versionadded:: 1.11.0
         """
         return self._j_job_execution_result.isJobExecutionResult()
 
@@ -54,6 +60,8 @@ class JobExecutionResult(object):
 
         :throws: Exception if this is not a JobExecutionResult.
         :return: The JobExecutionResult.
+
+        .. versionadded:: 1.11.0
         """
         return self
 
@@ -63,6 +71,8 @@ class JobExecutionResult(object):
         without the pre-flight steps like the optimizer.
 
         :return: The net execution time in milliseconds.
+
+        .. versionadded:: 1.11.0
         """
         return self._j_job_execution_result.getNetRuntime()
 
@@ -73,6 +83,8 @@ class JobExecutionResult(object):
 
         :param accumulator_name: The name of the accumulator.
         :return: The value of the accumulator with the given name.
+
+        .. versionadded:: 1.11.0
         """
         return self.get_all_accumulator_results().get(accumulator_name)
 
@@ -83,6 +95,8 @@ class JobExecutionResult(object):
 
         :return: The dict whose keys are the names of the accumulators and whose
                  values are the accumulator values produced by the job.
+
+        .. versionadded:: 1.11.0
         """
         j_result_map = self._j_job_execution_result.getAllAccumulatorResults()
         accumulators = {}
@@ -93,5 +107,7 @@ class JobExecutionResult(object):
     def to_string(self):
         """
         Convert JobExecutionResult to a string, if possible.
+
+        .. versionadded:: 1.11.0
         """
         return self._j_job_execution_result.toString()
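
A hedged follow-up to the sketch above, showing how a JobExecutionResult can be read back
through the JobClient; obtaining a Java class loader through the py4j gateway is an
assumption here:

    from pyflink.java_gateway import get_gateway

    # assumed way to obtain a class loader for deserializing accumulators
    class_loader = get_gateway().jvm.Thread.currentThread().getContextClassLoader()

    # blocks until the job finishes, then inspects runtime and accumulators
    execution_result = job_client.get_job_execution_result(class_loader).result()
    print("net runtime: %d ms" % execution_result.get_net_runtime())
    print(execution_result.get_all_accumulator_results())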
diff --git a/flink-python/pyflink/common/job_id.py b/flink-python/pyflink/common/job_id.py
index 49c1349..afb7013 100644
--- a/flink-python/pyflink/common/job_id.py
+++ b/flink-python/pyflink/common/job_id.py
@@ -26,6 +26,8 @@ class JobID(object):
     Jobs act simultaneously as sessions, because jobs can be created and submitted incrementally
     in different parts. Newer fragments of a graph can be attached to existing graphs, thereby
     extending the current data flow graphs.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_job_id):
diff --git a/flink-python/pyflink/common/job_status.py b/flink-python/pyflink/common/job_status.py
index bbd4b33..8d553e3 100644
--- a/flink-python/pyflink/common/job_status.py
+++ b/flink-python/pyflink/common/job_status.py
@@ -64,6 +64,8 @@ class JobStatus(object):
     :data:`RECONCILING`:
 
     The job is currently reconciling and waits for the task execution report to recover the state.
+
+    .. versionadded:: 1.11.0
     """
 
     CREATED = 0
@@ -91,6 +93,8 @@ class JobStatus(object):
         dropped from the high-availability services.
 
         :return: ``True`` if this job status is globally terminal, ``False`` otherwise.
+
+        .. versionadded:: 1.11.0
         """
         return self._j_job_status.isGloballyTerminalState()
 
@@ -104,6 +108,8 @@ class JobStatus(object):
         which is typically entered when the executing JobManager loses its leader status.
 
         :return: ``True`` if this job status is terminal, ``False`` otherwise.
+
+        .. versionadded:: 1.11.0
         """
         return self._j_job_status.isTerminalState()
 
diff --git a/flink-python/pyflink/metrics/metricbase.py b/flink-python/pyflink/metrics/metricbase.py
index 17966a3..a4b8d5f 100644
--- a/flink-python/pyflink/metrics/metricbase.py
+++ b/flink-python/pyflink/metrics/metricbase.py
@@ -22,6 +22,16 @@ from typing import Callable
 
 
 class MetricGroup(abc.ABC):
+    """
+    A MetricGroup is a named container for metrics and further metric subgroups.
+
+    Instances of this class can be used to register new metrics with Flink and to create a nested
+    hierarchy based on the group names.
+
+    A MetricGroup is uniquely identified by its place in the hierarchy and name.
+
+    .. versionadded:: 1.11.0
+    """
 
     def add_group(self, name: str, extra: str = None) -> 'MetricGroup':
         """
@@ -31,24 +41,32 @@ class MetricGroup(abc.ABC):
         The key group is added to this group's sub-groups, while the value
         group is added to the key group's sub-groups. In this case,
         the value group will be returned and a user variable will be defined.
+
+        .. versionadded:: 1.11.0
         """
         pass
 
     def counter(self, name: str) -> 'Counter':
         """
         Registers a new `Counter` with Flink.
+
+        .. versionadded:: 1.11.0
         """
         pass
 
     def gauge(self, name: str, obj: Callable[[], int]) -> None:
         """
         Registers a new `Gauge` with Flink.
+
+        .. versionadded:: 1.11.0
         """
         pass
 
     def meter(self, name: str, time_span_in_seconds: int = 60) -> 'Meter':
         """
         Registers a new `Meter` with Flink.
+
+        .. versionadded:: 1.11.0
         """
         # There is no meter type in Beam, use counter to implement meter
         pass
@@ -56,6 +74,8 @@ class MetricGroup(abc.ABC):
     def distribution(self, name: str) -> 'Distribution':
         """
         Registers a new `Distribution` with Flink.
+
+        .. versionadded:: 1.11.0
         """
         pass
 
@@ -139,54 +159,100 @@ class GenericMetricGroup(MetricGroup):
 
 
 class Metric(object):
-    """Base interface of a metric object."""
+    """
+    Base interface of a metric object.
+
+    .. versionadded:: 1.11.0
+    """
     pass
 
 
 class Counter(Metric):
-    """Counter metric interface. Allows a count to be incremented/decremented
-    during pipeline execution."""
+    """
+    Counter metric interface. Allows a count to be incremented/decremented
+    during pipeline execution.
+
+    .. versionadded:: 1.11.0
+    """
 
     def __init__(self, inner_counter):
         self._inner_counter = inner_counter
 
     def inc(self, n=1):
+        """
+        Increment the current count by the given value.
+
+        .. versionadded:: 1.11.0
+        """
         self._inner_counter.inc(n)
 
     def dec(self, n=1):
+        """
+        Decrement the current count by the given value.
+
+        .. versionadded:: 1.11.0
+        """
         self.inc(-n)
 
     def get_count(self):
+        """
+        Returns the current count.
+
+        .. versionadded:: 1.11.0
+        """
         from apache_beam.metrics.execution import MetricsEnvironment
         container = MetricsEnvironment.current_container()
         return container.get_counter(self._inner_counter.metric_name).get_cumulative()
 
 
 class Distribution(Metric):
-    """Distribution Metric interface.
+    """
+    Distribution Metric interface.
 
     Allows statistics about the distribution of a variable to be collected during
-    pipeline execution."""
+    pipeline execution.
+
+    .. versionadded:: 1.11.0
+    """
 
     def __init__(self, inner_distribution):
         self._inner_distribution = inner_distribution
 
     def update(self, value):
+        """
+        Updates the distribution value.
+
+        .. versionadded:: 1.11.0
+        """
         self._inner_distribution.update(value)
 
 
 class Meter(Metric):
-    """Meter Metric interface.
+    """
+    Meter Metric interface.
 
-    Metric for measuring throughput."""
+    Metric for measuring throughput.
+
+    .. versionadded:: 1.11.0
+    """
 
     def __init__(self, inner_counter):
         self._inner_counter = inner_counter
 
     def mark_event(self, value=1):
+        """
+        Mark occurrence of the specified number of events.
+
+        .. versionadded:: 1.11.0
+        """
         self._inner_counter.inc(value)
 
     def get_count(self):
+        """
+        Get the number of events marked on the meter.
+
+        .. versionadded:: 1.11.0
+        """
         from apache_beam.metrics.execution import MetricsEnvironment
         container = MetricsEnvironment.current_container()
         return container.get_counter(self._inner_counter.metric_name).get_cumulative()
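
To illustrate how these metric types are wired together, here is a hedged sketch of a
user-defined function that registers metrics in open(); the class name, group names, and
metric names are illustrative, and metrics rely on the 'python.metric.enabled'
configuration:

    from pyflink.table import DataTypes
    from pyflink.table.udf import ScalarFunction, udf

    class CountingUpper(ScalarFunction):
        def open(self, function_context):
            group = function_context.get_metric_group()
            # a nested group with a user variable, plus a counter and a meter
            self.calls = group.add_group("udf", "counting_upper").counter("calls")
            self.throughput = group.meter("throughput", time_span_in_seconds=60)

        def eval(self, s):
            self.calls.inc()
            self.throughput.mark_event()
            return s.upper()

    counting_upper = udf(CountingUpper(), input_types=[DataTypes.STRING()],
                         result_type=DataTypes.STRING())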
diff --git a/flink-python/pyflink/ml/api/base.py b/flink-python/pyflink/ml/api/base.py
index 6b7c39d..1ae66df 100644
--- a/flink-python/pyflink/ml/api/base.py
+++ b/flink-python/pyflink/ml/api/base.py
@@ -35,6 +35,8 @@ class PipelineStage(WithParams):
 
     Each pipeline stage is with parameters, and requires a public empty constructor for
     restoration in Pipeline.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, params=None):
@@ -75,6 +77,8 @@ class PipelineStage(WithParams):
 class Transformer(PipelineStage):
     """
     A transformer is a PipelineStage that transforms an input Table to a result Table.
+
+    .. versionadded:: 1.11.0
     """
 
     __metaclass__ = ABCMeta
@@ -87,6 +91,8 @@ class Transformer(PipelineStage):
         :param table_env: the table environment to which the input table is bound.
         :param table: the table to be transformed
         :returns: the transformed table
+
+        .. versionadded:: 1.11.0
         """
         raise NotImplementedError()
 
@@ -95,6 +101,8 @@ class JavaTransformer(Transformer):
     """
     Base class for Transformers that wrap Java implementations. Subclasses should
     ensure they have the transformer Java object available as j_obj.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_obj):
@@ -108,6 +116,8 @@ class JavaTransformer(Transformer):
         :param table_env: the table environment to which the input table is bound.
         :param table: the table to be transformed
         :returns: the transformed table
+
+        .. versionadded:: 1.11.0
         """
         self._convert_params_to_java(self._j_obj)
         return Table(self._j_obj.transform(table_env._j_tenv, table._j_table), table_env)
@@ -120,6 +130,8 @@ class Model(Transformer):
     A model is an ordinary Transformer except for how it is created. While ordinary transformers
     are defined by specifying the parameters directly, a model is usually generated by an Estimator
     when Estimator.fit(table_env, table) is invoked.
+
+    .. versionadded:: 1.11.0
     """
 
     __metaclass__ = ABCMeta
@@ -129,6 +141,8 @@ class JavaModel(JavaTransformer, Model):
     """
     Base class for JavaTransformers that wrap Java implementations.
     Subclasses should ensure they have the model Java object available as j_obj.
+
+    .. versionadded:: 1.11.0
     """
 
 
@@ -138,6 +152,8 @@ class Estimator(PipelineStage):
 
     The implementations are expected to take an input table as training samples and generate a
     Model which fits these samples.
+
+    .. versionadded:: 1.11.0
     """
 
     __metaclass__ = ABCMeta
@@ -149,6 +165,8 @@ class Estimator(PipelineStage):
         :param table_env: the table environment to which the input table is bound.
         :param table: the table with records to train the Model.
         :returns: a model trained to fit on the given Table.
+
+        .. versionadded:: 1.11.0
         """
         raise NotImplementedError()
 
@@ -157,6 +175,8 @@ class JavaEstimator(Estimator):
     """
     Base class for Estimators that wrap Java implementations.
     Subclasses should ensure they have the estimator Java object available as j_obj.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_obj):
@@ -170,6 +190,8 @@ class JavaEstimator(Estimator):
         :param table_env: the table environment to which the input table is bound.
         :param table: the table with records to train the Model.
         :returns: a model trained to fit on the given Table.
+
+        .. versionadded:: 1.11.0
         """
         self._convert_params_to_java(self._j_obj)
         return JavaModel(self._j_obj.fit(table_env._j_tenv, table._j_table))
@@ -194,6 +216,8 @@ class Pipeline(Estimator, Model, Transformer):
 
     In addition, a pipeline can also be used as a PipelineStage in another pipeline, just like an
     ordinary Estimator or Transformer as described above.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, stages=None, pipeline_json=None):
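
To make the Transformer/Pipeline contract above concrete, a hedged sketch of a
pure-Python Transformer chained into a Pipeline; table_env, input_table, and the filter
expression are assumptions:

    from pyflink.ml.api.base import Pipeline, Transformer

    class DropNullIds(Transformer):
        """Illustrative transformer that filters out rows with NULL ids."""

        def transform(self, table_env, table):
            return table.filter("id IS NOT NULL")

    # a pipeline with no Estimator stages acts as a plain Transformer
    pipeline = Pipeline(stages=[DropNullIds()])
    cleaned = pipeline.transform(table_env, input_table)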
diff --git a/flink-python/pyflink/ml/api/ml_environment.py b/flink-python/pyflink/ml/api/ml_environment.py
index b74ba12..7b29ae9 100644
--- a/flink-python/pyflink/ml/api/ml_environment.py
+++ b/flink-python/pyflink/ml/api/ml_environment.py
@@ -27,6 +27,8 @@ class MLEnvironment(object):
     will be associated with a unique ID. The operations associated with the same
     MLEnvironment ID will share the same Flink job context. Both MLEnvironment
     ID and MLEnvironment can only be retrieved from MLEnvironmentFactory.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, exe_env=None, stream_exe_env=None, batch_tab_env=None, stream_tab_env=None):
@@ -41,6 +43,8 @@ class MLEnvironment(object):
         it initializes the ExecutionEnvironment with the default Configuration.
 
         :return: the batch ExecutionEnvironment.
+
+        .. versionadded:: 1.11.0
         """
         if self._exe_env is None:
             self._exe_env = ExecutionEnvironment.get_execution_environment()
@@ -52,6 +56,8 @@ class MLEnvironment(object):
         set, it initializes the StreamExecutionEnvironment with the default Configuration.
 
         :return: the StreamExecutionEnvironment.
+
+        .. versionadded:: 1.11.0
         """
         if self._stream_exe_env is None:
             self._stream_exe_env = StreamExecutionEnvironment.get_execution_environment()
@@ -63,6 +69,8 @@ class MLEnvironment(object):
         it initializes the BatchTableEnvironment with the default Configuration.
 
         :return: the BatchTableEnvironment.
+
+        .. versionadded:: 1.11.0
         """
         if self._batch_tab_env is None:
             self._batch_tab_env = BatchTableEnvironment.create(
@@ -75,6 +83,8 @@ class MLEnvironment(object):
         it initializes the StreamTableEnvironment with the default Configuration.
 
         :return: the StreamTableEnvironment.
+
+        .. versionadded:: 1.11.0
         """
         if self._stream_tab_env is None:
             self._stream_tab_env = StreamTableEnvironment.create(
diff --git a/flink-python/pyflink/ml/api/ml_environment_factory.py b/flink-python/pyflink/ml/api/ml_environment_factory.py
index 1001798..4f40b38 100644
--- a/flink-python/pyflink/ml/api/ml_environment_factory.py
+++ b/flink-python/pyflink/ml/api/ml_environment_factory.py
@@ -28,6 +28,8 @@ import threading
 class MLEnvironmentFactory:
     """
     Factory to get the MLEnvironment using a MLEnvironmentId.
+
+    .. versionadded:: 1.11.0
     """
     _lock = threading.RLock()
     _default_ml_environment_id = 0
@@ -41,6 +43,8 @@ class MLEnvironmentFactory:
 
         :param ml_env_id: the MLEnvironmentId
         :return: the MLEnvironment
+
+        .. versionadded:: 1.11.0
         """
         with MLEnvironmentFactory._lock:
             if ml_env_id == 0:
@@ -58,6 +62,8 @@ class MLEnvironmentFactory:
         Get the MLEnvironment using the default MLEnvironmentId.
 
         :return: the default MLEnvironment.
+
+        .. versionadded:: 1.11.0
         """
         with MLEnvironmentFactory._lock:
             if MLEnvironmentFactory._map[MLEnvironmentFactory._default_ml_environment_id] is None:
@@ -78,6 +84,8 @@ class MLEnvironmentFactory:
         Create a unique MLEnvironment id and register a new MLEnvironment in the factory.
 
         :return: the MLEnvironment id.
+
+        .. versionadded:: 1.11.0
         """
         with MLEnvironmentFactory._lock:
             return MLEnvironmentFactory.register_ml_environment(MLEnvironment())
@@ -89,6 +97,8 @@ class MLEnvironmentFactory:
 
         :param ml_environment: the MLEnvironment that will be stored in the factory.
         :return: the MLEnvironment id.
+
+        .. versionadded:: 1.11.0
         """
         with MLEnvironmentFactory._lock:
             MLEnvironmentFactory._map[MLEnvironmentFactory._next_id] = ml_environment
@@ -102,6 +112,8 @@ class MLEnvironmentFactory:
 
         :param ml_env_id: the id.
         :return: the removed MLEnvironment
+
+        .. versionadded:: 1.11.0
         """
         with MLEnvironmentFactory._lock:
             if ml_env_id is None:
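
A brief, hedged illustration of the factory round-trip described above; get_default and
register_ml_environment appear in this diff, while get_ml_environment and
get_batch_table_environment are assumed accessor names:

    from pyflink.ml.api.ml_environment import MLEnvironment
    from pyflink.ml.api.ml_environment_factory import MLEnvironmentFactory

    # the default MLEnvironment always exists under the default id
    default_env = MLEnvironmentFactory.get_default()
    batch_table_env = default_env.get_batch_table_environment()

    # register a custom environment and look it up again by its id
    env_id = MLEnvironmentFactory.register_ml_environment(MLEnvironment())
    same_env = MLEnvironmentFactory.get_ml_environment(env_id)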
diff --git a/flink-python/pyflink/ml/lib/param/colname.py b/flink-python/pyflink/ml/lib/param/colname.py
index 581551b..57decd7 100644
--- a/flink-python/pyflink/ml/lib/param/colname.py
+++ b/flink-python/pyflink/ml/lib/param/colname.py
@@ -22,6 +22,8 @@ from pyflink.ml.api.param import WithParams, ParamInfo, TypeConverters
 class HasSelectedCols(WithParams):
     """
     An interface for classes with a parameter specifying the names of multiple table columns.
+
+    .. versionadded:: 1.11.0
     """
 
     selected_cols = ParamInfo(
@@ -40,6 +42,8 @@ class HasSelectedCols(WithParams):
 class HasOutputCol(WithParams):
     """
     An interface for classes with a parameter specifying the name of the output column.
+
+    .. versionadded:: 1.11.0
     """
 
     output_col = ParamInfo(
@@ -58,6 +62,8 @@ class HasOutputCol(WithParams):
 class HasPredictionCol(WithParams):
     """
     An interface for classes with a parameter specifying the column name of the prediction.
+
+    .. versionadded:: 1.11.0
     """
     prediction_col = ParamInfo(
         "predictionCol",
diff --git a/flink-python/pyflink/table/catalog.py b/flink-python/pyflink/table/catalog.py
index 9d3a7d3..f941ee1 100644
--- a/flink-python/pyflink/table/catalog.py
+++ b/flink-python/pyflink/table/catalog.py
@@ -15,6 +15,7 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 ################################################################################
+import warnings
 
 from py4j.java_gateway import java_import
 
@@ -626,6 +627,8 @@ class CatalogBaseTable(object):
         configuration for accessing the data in the external system.
 
         :return: Property map of the table/view.
+
+        .. versionadded:: 1.11.0
         """
         return dict(self._j_catalog_base_table.getOptions())
 
@@ -633,10 +636,13 @@ class CatalogBaseTable(object):
         """
         Get the properties of the table.
 
-        This method is deprecated. Use :func:`~pyflink.table.CatalogBaseTable.get_options` instead.
-
         :return: Property map of the table/view.
+
+        .. note:: This method is deprecated. Use :func:`~pyflink.table.CatalogBaseTable.get_options`
+                  instead.
         """
+        warnings.warn("Deprecated in 1.11. Use CatalogBaseTable#get_options instead.",
+                      DeprecationWarning)
         return dict(self._j_catalog_base_table.getProperties())
 
     def get_schema(self):
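
A small, hedged example of the new get_options() accessor that replaces the deprecated
get_properties(); the catalog, database, and table names are placeholders, and table_env
carries over from the earlier sketch:

    from pyflink.table.catalog import ObjectPath

    catalog = table_env.get_catalog("default_catalog")
    base_table = catalog.get_table(ObjectPath("default_database", "my_table"))

    options = base_table.get_options()      # preferred as of 1.11
    legacy = base_table.get_properties()    # still works, emits DeprecationWarning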
diff --git a/flink-python/pyflink/table/descriptors.py b/flink-python/pyflink/table/descriptors.py
index a51d0e5..561d83c 100644
--- a/flink-python/pyflink/table/descriptors.py
+++ b/flink-python/pyflink/table/descriptors.py
@@ -244,6 +244,8 @@ class Schema(Descriptor):
                        stored.
                        E.g., [('int_field', DataTypes.INT()), ('string_field', DataTypes.STRING())].
         :return: This schema object.
+
+        .. versionadded:: 1.11.0
         """
         if sys.version_info[:2] <= (3, 5) and not isinstance(fields, OrderedDict):
             raise TypeError("Must use OrderedDict type in python3.5 or older version to key the "
@@ -1083,6 +1085,8 @@ class Kafka(ConnectorDescriptor):
 
         :param timestamp: timestamp for the startup offsets, as milliseconds from epoch.
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         self._j_kafka = self._j_kafka.startFromTimestamp(int(timestamp))
         return self
@@ -1528,6 +1532,8 @@ class Elasticsearch(ConnectorDescriptor):
 class HBase(ConnectorDescriptor):
     """
     Connector descriptor for Apache HBase.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, version=None, table_name=None, zookeeper_quorum=None,
@@ -1580,6 +1586,8 @@ class HBase(ConnectorDescriptor):
 
         :param version: HBase version. E.g., "1.4.3".
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         if not isinstance(version, str):
             version = str(version)
@@ -1592,6 +1600,8 @@ class HBase(ConnectorDescriptor):
 
         :param table_name: Name of HBase table. E.g., "testNamespace:testTable", "testDefaultTable"
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         self._j_hbase = self._j_hbase.tableName(table_name)
         return self
@@ -1603,6 +1613,8 @@ class HBase(ConnectorDescriptor):
         :param zookeeper_quorum: zookeeper quorum address to connect to the HBase cluster. E.g.,
                                  "localhost:2181,localhost:2182,localhost:2183"
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         self._j_hbase = self._j_hbase.zookeeperQuorum(zookeeper_quorum)
         return self
@@ -1614,6 +1626,8 @@ class HBase(ConnectorDescriptor):
         :param zookeeper_node_parent: zookeeper node path of the HBase cluster. E.g.,
                                       "/hbase/example-root-znode".
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         self._j_hbase = self._j_hbase.zookeeperNodeParent(zookeeper_node_parent)
         return self
@@ -1625,6 +1639,8 @@ class HBase(ConnectorDescriptor):
 
         :param max_size: the maximum size.
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         if not isinstance(max_size, str):
             max_size = str(max_size)
@@ -1638,6 +1654,8 @@ class HBase(ConnectorDescriptor):
 
         :param write_buffer_flush_max_rows: the number of buffered rows that triggers the request flushing.
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         self._j_hbase = self._j_hbase.writeBufferFlushMaxRows(write_buffer_flush_max_rows)
         return self
@@ -1652,6 +1670,8 @@ class HBase(ConnectorDescriptor):
                          "{length value}{time unit label}" E.g, "123ms", "1 s", if not time unit
                          label is specified, it will be considered as milliseconds.
         :return: This object.
+
+        .. versionadded:: 1.11.0
         """
         if not isinstance(interval, str):
             interval = str(interval)
@@ -1763,6 +1783,8 @@ class ConnectTableDescriptor(Descriptor):
         .. note:: The schema must be explicitly defined.
 
         :param path: path where to register the temporary table
+
+        .. versionadded:: 1.10.0
         """
         self._j_connect_table_descriptor.createTemporaryTable(path)
         return self
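
A hedged sketch of the new HBase descriptor in the connect() API; the quorum addresses,
table name, and one-field schema are placeholders rather than a complete HBase mapping:

    from pyflink.table import DataTypes
    from pyflink.table.descriptors import HBase, Schema

    table_env.connect(
        HBase()
        .version("1.4.3")
        .table_name("testNamespace:testTable")
        .zookeeper_quorum("localhost:2181,localhost:2182,localhost:2183")
        .zookeeper_node_parent("/hbase/example-root-znode")
        .write_buffer_flush_max_size("10mb")
        .write_buffer_flush_max_rows(1000)
        .write_buffer_flush_interval("2s")) \
        .with_schema(Schema()
                     .field("rowkey", DataTypes.STRING())) \
        .create_temporary_table("hbase_sink")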
diff --git a/flink-python/pyflink/table/explain_detail.py b/flink-python/pyflink/table/explain_detail.py
index 0cbcbe9..b3b549f 100644
--- a/flink-python/pyflink/table/explain_detail.py
+++ b/flink-python/pyflink/table/explain_detail.py
@@ -22,6 +22,8 @@ __all__ = ['ExplainDetail']
 class ExplainDetail(object):
     """
     ExplainDetail defines the types of details for the explain result.
+
+    .. versionadded:: 1.11.0
     """
 
     # The cost information on physical rel node estimated by optimizer.
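
The constants defined here feed the explain() methods; a hedged one-liner, assuming a
table object from the earlier sketches and that ESTIMATED_COST and CHANGELOG_MODE are
among the defined details:

    from pyflink.table import ExplainDetail

    print(table.explain(ExplainDetail.ESTIMATED_COST, ExplainDetail.CHANGELOG_MODE))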
diff --git a/flink-python/pyflink/table/result_kind.py b/flink-python/pyflink/table/result_kind.py
index 68c324d..16c425f 100644
--- a/flink-python/pyflink/table/result_kind.py
+++ b/flink-python/pyflink/table/result_kind.py
@@ -32,6 +32,8 @@ class ResultKind(object):
 
     The statement (e.g. DML, DQL, SHOW) executes successfully, and the result contains important
     content.
+
+    .. versionadded:: 1.11.0
     """
 
     SUCCESS = 0
diff --git a/flink-python/pyflink/table/statement_set.py b/flink-python/pyflink/table/statement_set.py
index 005f384..be20035 100644
--- a/flink-python/pyflink/table/statement_set.py
+++ b/flink-python/pyflink/table/statement_set.py
@@ -33,6 +33,7 @@ class StatementSet(object):
         The added statements and Tables will be cleared
         when calling the `execute` method.
 
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, _j_statement_set, t_env):
@@ -47,6 +48,8 @@ class StatementSet(object):
         :type stmt: str
         :return: current StatementSet instance.
         :rtype: pyflink.table.StatementSet
+
+        .. versionadded:: 1.11.0
         """
         self._j_statement_set.addInsertSql(stmt)
         return self
@@ -65,6 +68,8 @@ class StatementSet(object):
         :type overwrite: bool
         :return: current StatementSet instance.
         :rtype: pyflink.table.StatementSet
+
+        .. versionadded:: 1.11.0
         """
         self._j_statement_set.addInsert(target_path, table._j_table, overwrite)
         return self
@@ -78,6 +83,8 @@ class StatementSet(object):
         :type extra_details: tuple[ExplainDetail] (variable-length arguments of ExplainDetail)
         :return: All statements and Tables for which the AST and execution plan will be returned.
         :rtype: str
+
+        .. versionadded:: 1.11.0
         """
         j_extra_details = to_j_explain_detail_arr(extra_details)
         return self._j_statement_set.explain(j_extra_details)
@@ -90,6 +97,8 @@ class StatementSet(object):
             The added statements and Tables will be cleared when executing this method.
 
         :return: execution result.
+
+        .. versionadded:: 1.11.0
         """
         self._t_env._before_execute()
         return TableResult(self._j_statement_set.execute())
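
Putting the StatementSet lifecycle together -- buffer statements, optionally inspect the
combined plan, then submit everything as one job; the table and sink names are
placeholders:

    statement_set = table_env.create_statement_set()
    statement_set.add_insert_sql(
        "INSERT INTO sink_a SELECT * FROM src WHERE id > 0")
    statement_set.add_insert("sink_b", table_env.from_path("src"))

    print(statement_set.explain())
    # execute() submits all buffered statements and clears the set
    table_result = statement_set.execute()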
diff --git a/flink-python/pyflink/table/table.py b/flink-python/pyflink/table/table.py
index 7174006..e140825 100644
--- a/flink-python/pyflink/table/table.py
+++ b/flink-python/pyflink/table/table.py
@@ -712,6 +712,8 @@ class Table(object):
             >>> table.filter("a > 0.5").to_pandas()
 
         :return: the result pandas DataFrame.
+
+        .. versionadded:: 1.11.0
         """
         self._t_env._before_execute()
         gateway = get_gateway()
@@ -774,6 +776,8 @@ class Table(object):
                existing data or not.
         :type overwrite: bool
         :return: The table result.
+
+        .. versionadded:: 1.11.0
         """
         self._t_env._before_execute()
         return TableResult(self._j_table.executeInsert(table_path, overwrite))
@@ -788,6 +792,8 @@ class Table(object):
             >>> tab.execute()
 
         :return: The content of the table.
+
+        .. versionadded:: 1.11.0
         """
         self._t_env._before_execute()
         return TableResult(self._j_table.execute())
@@ -801,6 +807,8 @@ class Table(object):
         :type extra_details: tuple[ExplainDetail] (variable-length arguments of ExplainDetail)
         :return: The statement for which the AST and execution plan will be returned.
         :rtype: str
+
+        .. versionadded:: 1.11.0
         """
         j_extra_details = to_j_explain_detail_arr(extra_details)
         return self._j_table.explain(j_extra_details)
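
A compact, hedged tour of the Table methods documented above; "src" and "sink" are
assumed registered tables:

    table = table_env.from_path("src")

    # collect the (bounded) result to the client as a pandas DataFrame
    pdf = table.filter("a > 0.5").to_pandas()

    # execute and fetch the content, or write it into a registered sink
    content_result = table.execute()
    insert_result = table.execute_insert("sink", overwrite=False)

    # textual AST and execution plan
    print(table.explain())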
diff --git a/flink-python/pyflink/table/table_environment.py b/flink-python/pyflink/table/table_environment.py
index 5a9c8c9..7327378 100644
--- a/flink-python/pyflink/table/table_environment.py
+++ b/flink-python/pyflink/table/table_environment.py
@@ -372,6 +372,8 @@ class TableEnvironment(object):
 
         :return: List of view names in the current database of the current catalog.
         :rtype: list[str]
+
+        .. versionadded:: 1.11.0
         """
         j_view_name_array = self._j_tenv.listViews()
         return [item for item in j_view_name_array]
@@ -491,6 +493,8 @@ class TableEnvironment(object):
         :type extra_details: tuple[ExplainDetail] (variable-length arguments of ExplainDetail)
         :return: The statement for which the AST and execution plan will be returned.
         :rtype: str
+
+        .. versionadded:: 1.11.0
         """
 
         j_extra_details = to_j_explain_detail_arr(extra_details)
@@ -532,6 +536,8 @@ class TableEnvironment(object):
         :return: content for DQL/SHOW/DESCRIBE/EXPLAIN,
                 the affected row count for `DML` (-1 means unknown),
                 or a string message ("OK") for other statements.
+
+        .. versionadded:: 1.11.0
         """
         self._before_execute()
         return TableResult(self._j_tenv.executeSql(stmt))
@@ -544,6 +550,8 @@ class TableEnvironment(object):
 
         :return: statement_set instance
         :rtype: pyflink.table.StatementSet
+
+        .. versionadded:: 1.11.0
         """
         _j_statement_set = self._j_tenv.createStatementSet()
         return StatementSet(_j_statement_set, self)
@@ -1209,6 +1217,8 @@ class TableEnvironment(object):
                            determines the number of parallel source tasks.
                            If not specified, the default parallelism will be used.
         :return: The result table.
+
+        .. versionadded:: 1.11.0
         """
 
         import pandas as pd
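
A hedged sketch combining the TableEnvironment additions above; the view/table names and
the pandas columns are illustrative:

    import pandas as pd

    # single-statement entry point for DDL/DML/DQL
    table_env.execute_sql("CREATE TEMPORARY VIEW v AS SELECT * FROM src")
    print(table_env.list_views())

    # build a Table directly from a pandas DataFrame
    pdf = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
    table = table_env.from_pandas(pdf)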
diff --git a/flink-python/pyflink/table/table_result.py b/flink-python/pyflink/table/table_result.py
index 5071424..c611f67 100644
--- a/flink-python/pyflink/table/table_result.py
+++ b/flink-python/pyflink/table/table_result.py
@@ -25,6 +25,8 @@ __all__ = ['TableResult']
 class TableResult(object):
     """
     A :class:`~pyflink.table.TableResult` is the representation of the statement execution result.
+
+    .. versionadded:: 1.11.0
     """
 
     def __init__(self, j_table_result):
@@ -37,6 +39,8 @@ class TableResult(object):
 
         :return: The job client, optional.
         :rtype: pyflink.common.JobClient
+
+        .. versionadded:: 1.11.0
         """
         job_client = self._j_table_result.getJobClient()
         if job_client.isPresent():
@@ -50,6 +54,8 @@ class TableResult(object):
 
         :return: The schema of the result.
         :rtype: pyflink.table.TableSchema
+
+        .. versionadded:: 1.11.0
         """
         return TableSchema(j_table_schema=self._j_table_result.getTableSchema())
 
@@ -59,11 +65,15 @@ class TableResult(object):
 
         :return: The result kind.
         :rtype: pyflink.table.ResultKind
+
+        .. versionadded:: 1.11.0
         """
         return ResultKind._from_j_result_kind(self._j_table_result.getResultKind())
 
     def print(self):
         """
         Print the result contents in tableau form to the client console.
+
+        .. versionadded:: 1.11.0
         """
         self._j_table_result.print()
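
A short, hedged example of inspecting a TableResult returned by execute_sql():

    table_result = table_env.execute_sql("SHOW TABLES")
    print(table_result.get_table_schema())
    print(table_result.get_result_kind())   # e.g. ResultKind.SUCCESS_WITH_CONTENT
    table_result.print()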
diff --git a/flink-python/pyflink/table/types.py b/flink-python/pyflink/table/types.py
index e92660c..413272a 100644
--- a/flink-python/pyflink/table/types.py
+++ b/flink-python/pyflink/table/types.py
@@ -1182,6 +1182,8 @@ class RowType(DataType):
     def field_types(self):
         """
         Returns all field types in a list.
+
+        .. versionadded:: 1.11.0
         """
         return list([f.data_type for f in self.fields])
 
diff --git a/flink-python/pyflink/table/udf.py b/flink-python/pyflink/table/udf.py
index 9248275..b6b3e7e 100644
--- a/flink-python/pyflink/table/udf.py
+++ b/flink-python/pyflink/table/udf.py
@@ -39,6 +39,11 @@ class FunctionContext(object):
         self._base_metric_group = base_metric_group
 
     def get_metric_group(self) -> MetricGroup:
+        """
+        Returns the metric group for this parallel subtask.
+
+        .. versionadded:: 1.11.0
+        """
         if self._base_metric_group is None:
             raise RuntimeError("Metric has not been enabled. You can enable "
                                "metric with the 'python.metric.enabled' configuration.")
@@ -48,9 +53,11 @@ class FunctionContext(object):
 class UserDefinedFunction(abc.ABC):
     """
     Base interface for user-defined function.
+
+    .. versionadded:: 1.10.0
     """
 
-    def open(self, function_context):
+    def open(self, function_context: FunctionContext):
         """
         Initialization method for the function. It is called before the actual working methods
         and thus suitable for one time setup work.
@@ -85,6 +92,8 @@ class ScalarFunction(UserDefinedFunction):
     """
     Base interface for user-defined scalar function. A user-defined scalar function maps zero, one,
     or multiple scalar values to a new scalar value.
+
+    .. versionadded:: 1.10.0
     """
 
     @abc.abstractmethod
@@ -99,6 +108,8 @@ class TableFunction(UserDefinedFunction):
     """
     Base interface for user-defined table function. A user-defined table function maps zero, one,
     or multiple scalar values to zero, one, or multiple rows.
+
+    .. versionadded:: 1.11.0
     """
 
     @abc.abstractmethod
@@ -343,6 +354,8 @@ def udf(f=None, input_types=None, result_type=None, deterministic=None, name=Non
     :type udf_type: str
     :return: UserDefinedScalarFunctionWrapper or function.
     :rtype: UserDefinedScalarFunctionWrapper or function
+
+    .. versionadded:: 1.10.0
     """
     if udf_type not in ('general', 'pandas'):
         raise ValueError("The udf_type must be one of 'general, pandas', got %s." % udf_type)
@@ -387,6 +400,8 @@ def udtf(f=None, input_types=None, result_types=None, deterministic=None, name=N
     :type deterministic: bool
     :return: UserDefinedTableFunctionWrapper or function.
     :rtype: UserDefinedTableFunctionWrapper or function
+
+    .. versionadded:: 1.11.0
     """
     # decorator
     if f is None:
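
Finally, a hedged end-to-end sketch of the udf/udtf decorators whose docstrings are
versioned above; the types, names, and registration calls are illustrative:

    from pyflink.table import DataTypes
    from pyflink.table.udf import udf, udtf

    @udf(input_types=[DataTypes.BIGINT(), DataTypes.BIGINT()],
         result_type=DataTypes.BIGINT())
    def add(i, j):
        return i + j

    @udtf(input_types=DataTypes.STRING(), result_types=DataTypes.STRING())
    def split(s):
        for part in s.split(","):
            yield part

    table_env.register_function("add", add)
    table_env.register_function("split", split)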