Posted to commits@tvm.apache.org by ss...@apache.org on 2023/10/05 02:24:17 UTC

[tvm] branch unity updated: [Unity][Fix] Remove duplicated words from comments, NFC (#15875)

This is an automated email from the ASF dual-hosted git repository.

sslyu pushed a commit to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/unity by this push:
     new 2e30dbe0ff [Unity][Fix] Remove duplicated words from comments, NFC (#15875)
2e30dbe0ff is described below

commit 2e30dbe0ff1abc9df0ed1f64e6d240780b1d7fe8
Author: Krzysztof Parzyszek <kp...@quicinc.com>
AuthorDate: Wed Oct 4 21:24:09 2023 -0500

    [Unity][Fix] Remove duplicated words from comments, NFC (#15875)
    
    Removed instances of accidentally repeated words from comments. There
    are cases where duplicated words appear legitimately; those cases remain
    unmodified.
---
 include/tvm/relax/transform.h                      | 2 +-
 python/tvm/relax/frontend/nn/op.py                 | 2 +-
 python/tvm/relax/op/linear_algebra.py              | 4 ++--
 python/tvm/relax/struct_info.py                    | 4 ++--
 python/tvm/relax/training/utils.py                 | 2 +-
 python/tvm/relax/transform/transform.py            | 8 ++++++--
 python/tvm/runtime/disco/process_pool.py           | 2 +-
 src/relax/op/tensor/linear_algebra.h               | 2 +-
 src/relax/training/utils.h                         | 2 +-
 src/relax/transform/fuse_ops.cc                    | 2 +-
 src/relax/transform/static_plan_block_memory.cc    | 2 +-
 tests/python/relax/test_transform_fold_constant.py | 2 +-
 12 files changed, 19 insertions(+), 15 deletions(-)

diff --git a/include/tvm/relax/transform.h b/include/tvm/relax/transform.h
index 45a31b0911..78d835a442 100644
--- a/include/tvm/relax/transform.h
+++ b/include/tvm/relax/transform.h
@@ -503,7 +503,7 @@ TVM_DLL Pass DecomposeOpsForTraining(Optional<String> func_name);
  * pass inserts the layout transformations in the call sites of PrimFuncs being replaced to
  * transform i/o buffers into expected layout.
  *
- * \param op_impl_map Map from from kOperatorName attr (e.g., relax.conv2d) to replacement PrimFunc
+ * \param op_impl_map Map from kOperatorName attr (e.g., relax.conv2d) to replacement PrimFunc
  * \param op_buffer_transforms Map from kOperatorName attr to layout transformations on each of the
  * PrimFunc i/o buffers.
  * \param axis_separators Map from kOperatorName attr to axis_separators of each buffer_transforms
diff --git a/python/tvm/relax/frontend/nn/op.py b/python/tvm/relax/frontend/nn/op.py
index 8eafa80802..3e7b9d6bb2 100644
--- a/python/tvm/relax/frontend/nn/op.py
+++ b/python/tvm/relax/frontend/nn/op.py
@@ -256,7 +256,7 @@ def matmul(a: Tensor, b: Tensor, out_dtype: Optional[str] = None, name: str = "m
 
     out_dtype: Optional[Union[str, DataType]]
         The data type of the matmul result.
-        When it is not specified, the output dtype will be the the same as input dtype.
+        When it is not specified, the output dtype will be the same as input dtype.
 
     name : str
         Name hint.
diff --git a/python/tvm/relax/op/linear_algebra.py b/python/tvm/relax/op/linear_algebra.py
index e7e609937d..efb5085c78 100644
--- a/python/tvm/relax/op/linear_algebra.py
+++ b/python/tvm/relax/op/linear_algebra.py
@@ -41,7 +41,7 @@ def matmul(x1: Expr, x2: Expr, out_dtype: Optional[Union[str, DataType]] = None)
 
     out_dtype: Optional[Union[str, DataType]]
         The data type of the matmul result.
-        When it is not specified, the output dtype will be the the same as input dtype.
+        When it is not specified, the output dtype will be the same as input dtype.
 
     Returns
     -------
@@ -72,7 +72,7 @@ def linear(
 
     out_dtype: Optional[Union[str, DataType]]
         The data type of the matmul result.
-        When it is not specified, the output dtype will be the the same as input dtype.
+        When it is not specified, the output dtype will be the same as input dtype.
 
     Notes
     -----
diff --git a/python/tvm/relax/struct_info.py b/python/tvm/relax/struct_info.py
index 4f89dc158c..34a9d82595 100644
--- a/python/tvm/relax/struct_info.py
+++ b/python/tvm/relax/struct_info.py
@@ -148,7 +148,7 @@ class TensorStructInfo(StructInfo):
     dtype : Optional[str]
         The content data type.
 
-    vdevice : Optional[Vdevice]
+    vdevice : Optional[VDevice]
         The virtual device.
 
     ndim : Optional[int]
@@ -247,7 +247,7 @@ class FuncStructInfo(StructInfo):
         Parameters
         ----------
         ret: Optional[StructInfo]
-           The struct info of the the function return value.
+           The struct info of the function return value.
 
         derive_func: Optional[EnvFunc]
            The environment function used for derivation
diff --git a/python/tvm/relax/training/utils.py b/python/tvm/relax/training/utils.py
index bf9e937457..4d1a321772 100644
--- a/python/tvm/relax/training/utils.py
+++ b/python/tvm/relax/training/utils.py
@@ -90,7 +90,7 @@ def AppendLoss(
         Specify the number of `prediction_outputs` of the backbone function. Default: 1.
 
     new_func_name : Optional[str]
-        Specify the name of the appended result. If is is not specified, the name will be
+        Specify the name of the appended result. If it is not specified, the name will be
         `func_name + "_loss"`.
 
     Returns
diff --git a/python/tvm/relax/transform/transform.py b/python/tvm/relax/transform/transform.py
index 1676ba18c1..028a0e04d4 100644
--- a/python/tvm/relax/transform/transform.py
+++ b/python/tvm/relax/transform/transform.py
@@ -448,8 +448,12 @@ def BindParams(
     func_name: str
         The function name to be bound
 
-    params : Dict[Union[str,relax.Var],Union[tvm.runtime.NDArray, np.ndarray]]
-        The map from parameter or parameter name name to constant
+    params : Dict[
+                Union[str,relax.Var],
+                Union[tvm.runtime.NDArray, np.ndarray],
+             ]
+
+        The map from parameter or parameter name to constant
         tensors.
 
     Returns
diff --git a/python/tvm/runtime/disco/process_pool.py b/python/tvm/runtime/disco/process_pool.py
index fd4ba7a165..836744dba6 100644
--- a/python/tvm/runtime/disco/process_pool.py
+++ b/python/tvm/runtime/disco/process_pool.py
@@ -165,7 +165,7 @@ def _kill_child_processes(pid):
 
 @register_func("runtime.disco.create_process_pool")
 def _create_process_pool(num_workers: int):
-    """Create a process pool where the workers' are are [1, num_workers)."""
+    """Create a process pool where the workers' are [1, num_workers)."""
     pool = [DiscoPopenWorker(i, num_workers) for i in range(1, num_workers)]
 
     def result_func(worker_id: int):
diff --git a/src/relax/op/tensor/linear_algebra.h b/src/relax/op/tensor/linear_algebra.h
index 83deb02012..e0f091bb61 100644
--- a/src/relax/op/tensor/linear_algebra.h
+++ b/src/relax/op/tensor/linear_algebra.h
@@ -38,7 +38,7 @@ namespace relax {
  * \param x1 The first input tensor.
  * \param x2 The second input tensor.
  * \param out_dtype The data type of the matmul result.
- * When it is not specified, the output dtype will be the the same as input dtype.
+ * When it is not specified, the output dtype will be the same as input dtype.
  * \return The computed result.
  */
 Expr matmul(Expr x1, Expr x2, DataType out_dtype);
diff --git a/src/relax/training/utils.h b/src/relax/training/utils.h
index 074aedc287..f280308f9d 100644
--- a/src/relax/training/utils.h
+++ b/src/relax/training/utils.h
@@ -46,7 +46,7 @@ namespace transform {
  * \param loss_function The loss function.
  * \param num_backbone_outputs Specify the number of `prediction_outputs` of the backbone function.
  * Default: 1.
- * \param new_func_name Specify the name of the appended result. If is is not specified, the name
+ * \param new_func_name Specify the name of the appended result. If it is not specified, the name
  * will be `func_name + "_loss"`.
  * \return The Pass.
  */
diff --git a/src/relax/transform/fuse_ops.cc b/src/relax/transform/fuse_ops.cc
index a13d0830a0..8a76935e9b 100644
--- a/src/relax/transform/fuse_ops.cc
+++ b/src/relax/transform/fuse_ops.cc
@@ -446,7 +446,7 @@ class FunctionCreator : public ExprMutator {
   }
 
   /*!
-   * \brief Create the grouped function according according to the collected bindings and parameters
+   * \brief Create the grouped function according to the collected bindings and parameters
    * \param composite_name The name to identify the pattern this function is created from, if any.
    * It will become the value of the kComposite attribute of the created function.
    * \note The created function won't be returned immediately. It's stored in the `function_` field.
diff --git a/src/relax/transform/static_plan_block_memory.cc b/src/relax/transform/static_plan_block_memory.cc
index e6aa450ff8..ef2d582548 100644
--- a/src/relax/transform/static_plan_block_memory.cc
+++ b/src/relax/transform/static_plan_block_memory.cc
@@ -662,7 +662,7 @@ class StorageAllocator : public StorageAllocatorBaseVisitor {
   /*!
    * \brief Check if a token has no reference and thus can be released. And release it if so.
    * \param token The token to be checked.
-   * \param release_site The CallNode where the the input token is send for release.
+   * \param release_site The CallNode where the input token is send for release.
    * If the token is checked to release here, we keep record of the release site so that
    * kill_tensor can be inserted here at the rewrite stage.
    */
diff --git a/tests/python/relax/test_transform_fold_constant.py b/tests/python/relax/test_transform_fold_constant.py
index a4dffba114..9f2e3a4a09 100644
--- a/tests/python/relax/test_transform_fold_constant.py
+++ b/tests/python/relax/test_transform_fold_constant.py
@@ -24,7 +24,7 @@ from tvm.script import ir as I, tir as T, relax as R
 
 
 def gen_mod(mod, name, binding):
-    """Select relax function with name, rename to main and and bind constant.
+    """Select relax function with name, rename to main and bind constant.
 
     Parameters
     ----------