Posted to commits@arrow.apache.org by ko...@apache.org on 2020/01/05 03:07:32 UTC

[arrow] branch master updated: ARROW-7479: [Rust][Ruby][R] Fix typos

This is an automated email from the ASF dual-hosted git repository.

kou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new dd6b17d  ARROW-7479: [Rust][Ruby][R] Fix typos
dd6b17d is described below

commit dd6b17d0cc1a77aaff84c5a4472ac73bc79486af
Author: Kazuaki Ishizaki <is...@jp.ibm.com>
AuthorDate: Sun Jan 5 12:07:10 2020 +0900

    ARROW-7479: [Rust][Ruby][R] Fix typos
    
    This PR fixes typos in files under the `rust`, `ruby`, and `r` directories.
    
    Closes #6108 from kiszk/ARROW-7479 and squashes the following commits:
    
    07098de78 <Kazuaki Ishizaki> address review comment
    1c8927ea0 <Kazuaki Ishizaki> address review comment
    3b35733d0 <Kazuaki Ishizaki> fix typo
    a22296ba3 <Kazuaki Ishizaki> address review comment
    529a3769e <Kazuaki Ishizaki> fix typos
    
    Authored-by: Kazuaki Ishizaki <is...@jp.ibm.com>
    Signed-off-by: Sutou Kouhei <ko...@clear-code.com>
---
 r/R/csv.R                                          |  2 +-
 r/R/dplyr.R                                        |  2 +-
 r/R/parquet.R                                      |  4 +--
 r/man/ParquetWriterProperties.Rd                   |  2 +-
 r/man/read_delim_arrow.Rd                          |  2 +-
 r/man/write_parquet.Rd                             |  2 +-
 r/src/array_from_vector.cpp                        | 26 +++++++++---------
 r/src/arrow_types.h                                |  2 +-
 r/src/recordbatch.cpp                              |  2 +-
 r/tests/testthat/test-Array.R                      |  2 +-
 ruby/red-arrow/lib/arrow/field.rb                  |  2 +-
 rust/arrow/src/array/array.rs                      |  8 +++---
 rust/arrow/src/array/builder.rs                    | 32 +++++++++++-----------
 rust/arrow/src/ipc/gen/SparseTensor.rs             |  2 +-
 rust/arrow/src/ipc/reader.rs                       |  2 +-
 rust/arrow/src/json/reader.rs                      |  4 +--
 rust/datafusion/src/execution/physical_plan/mod.rs |  2 +-
 rust/parquet/src/column/writer.rs                  |  2 +-
 rust/parquet/src/file/properties.rs                |  8 +++---
 rust/parquet/src/record/api.rs                     |  2 +-
 rust/parquet/src/util/bit_util.rs                  |  2 +-
 21 files changed, 56 insertions(+), 56 deletions(-)

diff --git a/r/R/csv.R b/r/R/csv.R
index 8abb711..797ccdc 100644
--- a/r/R/csv.R
+++ b/r/R/csv.R
@@ -67,7 +67,7 @@
 #' @param as_data_frame Should the function return a `data.frame` or an
 #' [arrow::Table][Table]?
 #'
-#' @return A `data.frame`, or an Table if `as_data_frame = FALSE`.
+#' @return A `data.frame`, or a Table if `as_data_frame = FALSE`.
 #' @export
 #' @examples
 #' \donttest{
diff --git a/r/R/dplyr.R b/r/R/dplyr.R
index fcad16c..39e56d3 100644
--- a/r/R/dplyr.R
+++ b/r/R/dplyr.R
@@ -21,7 +21,7 @@
 arrow_dplyr_query <- function(.data) {
   # An arrow_dplyr_query is a container for an Arrow data object (Table,
   # RecordBatch, or Dataset) and the state of the user's dplyr query--things
-  # like selected colums, filters, and group vars.
+  # like selected columns, filters, and group vars.
 
   # For most dplyr methods,
   # method.Table == method.RecordBatch == method.Dataset == method.arrow_dplyr_query
diff --git a/r/R/parquet.R b/r/R/parquet.R
index 46fc7ce..0973a8b 100644
--- a/r/R/parquet.R
+++ b/r/R/parquet.R
@@ -60,7 +60,7 @@ read_parquet <- function(file,
 #' @param compression_level compression level. Meaning depends on compression algorithm
 #' @param use_dictionary Specify if we should use dictionary encoding. Default `TRUE`
 #' @param write_statistics Specify if we should write statistics. Default `TRUE`
-#' @param data_page_size Set a target threshhold for the approximate encoded
+#' @param data_page_size Set a target threshold for the approximate encoded
 #'    size of data pages within a column chunk (in bytes). Default 1 MiB.
 #' @param properties properties for parquet writer, derived from arguments
 #'   `version`, `compression`, `compression_level`, `use_dictionary`,
@@ -254,7 +254,7 @@ make_valid_version <- function(version, valid_versions = valid_parquet_version)
 #' - `compression_level`: Compression level; meaning depends on compression algorithm
 #' - `use_dictionary`: Specify if we should use dictionary encoding. Default `TRUE`
 #' - `write_statistics`: Specify if we should write statistics. Default `TRUE`
-#' - `data_page_size`: Set a target threshhold for the approximate encoded
+#' - `data_page_size`: Set a target threshold for the approximate encoded
 #'    size of data pages within a column chunk (in bytes). Default 1 MiB.
 #'
 #' @details The parameters `compression`, `compression_level`, `use_dictionary`
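
The `data_page_size` option documented above has a direct counterpart in this
repository's Rust parquet crate (whose properties file is also touched later
in this diff). A minimal sketch, assuming the `WriterProperties` builder API
of this era; the exact method names (`set_data_pagesize_limit` and friends)
are recalled, not verified against this exact revision:

    use parquet::basic::Compression;
    use parquet::file::properties::WriterProperties;

    fn main() {
        // Target ~1 MiB of encoded bytes per data page within a column
        // chunk, matching the default documented above.
        let props = WriterProperties::builder()
            .set_compression(Compression::SNAPPY)   // `compression`
            .set_dictionary_enabled(true)           // `use_dictionary`
            .set_statistics_enabled(true)           // `write_statistics`
            .set_data_pagesize_limit(1024 * 1024)   // `data_page_size`
            .build();
        assert_eq!(props.data_pagesize_limit(), 1024 * 1024);
    }
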
diff --git a/r/man/ParquetWriterProperties.Rd b/r/man/ParquetWriterProperties.Rd
index 4df2e6a..a2fab2a 100644
--- a/r/man/ParquetWriterProperties.Rd
+++ b/r/man/ParquetWriterProperties.Rd
@@ -37,7 +37,7 @@ and takes the following arguments:
 \item \code{compression_level}: Compression level; meaning depends on compression algorithm
 \item \code{use_dictionary}: Specify if we should use dictionary encoding. Default \code{TRUE}
 \item \code{write_statistics}: Specify if we should write statistics. Default \code{TRUE}
-\item \code{data_page_size}: Set a target threshhold for the approximate encoded
+\item \code{data_page_size}: Set a target threshold for the approximate encoded
 size of data pages within a column chunk (in bytes). Default 1 MiB.
 }
 }
diff --git a/r/man/read_delim_arrow.Rd b/r/man/read_delim_arrow.Rd
index 030ab38..e26fe5b 100644
--- a/r/man/read_delim_arrow.Rd
+++ b/r/man/read_delim_arrow.Rd
@@ -109,7 +109,7 @@ parsing options provided in other arguments (e.g. \code{delim}, \code{quote}, et
 \link[=Table]{arrow::Table}?}
 }
 \value{
-A \code{data.frame}, or an Table if \code{as_data_frame = FALSE}.
+A \code{data.frame}, or a Table if \code{as_data_frame = FALSE}.
 }
 \description{
 These functions uses the Arrow C++ CSV reader to read into a \code{data.frame}.
diff --git a/r/man/write_parquet.Rd b/r/man/write_parquet.Rd
index f5af877..fa32a9f 100644
--- a/r/man/write_parquet.Rd
+++ b/r/man/write_parquet.Rd
@@ -42,7 +42,7 @@ write_parquet(
 
 \item{write_statistics}{Specify if we should write statistics. Default \code{TRUE}}
 
-\item{data_page_size}{Set a target threshhold for the approximate encoded
+\item{data_page_size}{Set a target threshold for the approximate encoded
 size of data pages within a column chunk (in bytes). Default 1 MiB.}
 
 \item{properties}{properties for parquet writer, derived from arguments
diff --git a/r/src/array_from_vector.cpp b/r/src/array_from_vector.cpp
index 1f94f48..3bf015d 100644
--- a/r/src/array_from_vector.cpp
+++ b/r/src/array_from_vector.cpp
@@ -1002,7 +1002,7 @@ arrow::Status CheckCompatibleStruct(SEXP obj,
 }
 
 std::shared_ptr<arrow::Array> Array__from_vector(
-    SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_infered) {
+    SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_inferred) {
   // short circuit if `x` is already an Array
   if (Rf_inherits(x, "Array")) {
     return Rcpp::ConstReferenceSmartPtrInputParameter<std::shared_ptr<arrow::Array>>(x);
@@ -1020,9 +1020,9 @@ std::shared_ptr<arrow::Array> Array__from_vector(
     return arrow::r::MakeStringArray(x);
   }
 
-  // factors only when type has been infered
+  // factors only when type has been inferred
   if (type->id() == Type::DICTIONARY) {
-    if (type_infered || arrow::r::CheckCompatibleFactor(x, type)) {
+    if (type_inferred || arrow::r::CheckCompatibleFactor(x, type)) {
       return arrow::r::MakeFactorArray(x, type);
     }
 
@@ -1031,7 +1031,7 @@ std::shared_ptr<arrow::Array> Array__from_vector(
 
   // struct types
   if (type->id() == Type::STRUCT) {
-    if (!type_infered) {
+    if (!type_inferred) {
       STOP_IF_NOT_OK(arrow::r::CheckCompatibleStruct(x, type));
     }
 
@@ -1066,16 +1066,16 @@ std::shared_ptr<arrow::DataType> Array__infer_type(SEXP x) {
 // [[arrow::export]]
 std::shared_ptr<arrow::Array> Array__from_vector(SEXP x, SEXP s_type) {
   // the type might be NULL, in which case we need to infer it from the data
-  // we keep track of whether it was infered or supplied
-  bool type_infered = Rf_isNull(s_type);
+  // we keep track of whether it was inferred or supplied
+  bool type_inferred = Rf_isNull(s_type);
   std::shared_ptr<arrow::DataType> type;
-  if (type_infered) {
+  if (type_inferred) {
     type = arrow::r::InferType(x);
   } else {
     type = arrow::r::extract<arrow::DataType>(s_type);
   }
 
-  return arrow::r::Array__from_vector(x, type, type_infered);
+  return arrow::r::Array__from_vector(x, type, type_inferred);
 }
 
 // [[arrow::export]]
@@ -1084,12 +1084,12 @@ std::shared_ptr<arrow::ChunkedArray> ChunkedArray__from_list(Rcpp::List chunks,
   std::vector<std::shared_ptr<arrow::Array>> vec;
 
   // the type might be NULL, in which case we need to infer it from the data
-  // we keep track of whether it was infered or supplied
-  bool type_infered = Rf_isNull(s_type);
+  // we keep track of whether it was inferred or supplied
+  bool type_inferred = Rf_isNull(s_type);
   R_xlen_t n = XLENGTH(chunks);
 
   std::shared_ptr<arrow::DataType> type;
-  if (type_infered) {
+  if (type_inferred) {
     if (n == 0) {
       Rcpp::stop("type must be specified for empty list");
     }
@@ -1106,11 +1106,11 @@ std::shared_ptr<arrow::ChunkedArray> ChunkedArray__from_list(Rcpp::List chunks,
     vec.push_back(array);
   } else {
     // the first - might differ from the rest of the loop
-    // because we might have infered the type from the first element of the list
+    // because we might have inferred the type from the first element of the list
     //
     // this only really matters for dictionary arrays
     vec.push_back(
-        arrow::r::Array__from_vector(VECTOR_ELT(chunks, 0), type, type_infered));
+        arrow::r::Array__from_vector(VECTOR_ELT(chunks, 0), type, type_inferred));
 
     for (R_xlen_t i = 1; i < n; i++) {
       vec.push_back(arrow::r::Array__from_vector(VECTOR_ELT(chunks, i), type, false));
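
The comments in this file describe a small piece of bookkeeping: when no type
is supplied, infer one from the data, remember that it was inferred, build the
first chunk under that flag (it matters for dictionary arrays), and build
every later chunk against the now-fixed type. A minimal Rust sketch of that
control flow, using hypothetical toy types rather than the real C++/R
machinery:

    #[derive(Clone, PartialEq, Debug)]
    enum ToyType { Int32, Utf8 }

    fn infer_type(chunk: &[&str]) -> ToyType {
        if chunk.iter().all(|v| v.parse::<i32>().is_ok()) { ToyType::Int32 } else { ToyType::Utf8 }
    }

    // Stand-in for Array__from_vector: a supplied (not inferred) type would
    // be compatibility-checked here, mirroring CheckCompatibleStruct above.
    fn array_from_chunk(chunk: &[&str], ty: &ToyType, type_inferred: bool) -> Vec<String> {
        let _ = type_inferred;
        chunk.iter().map(|v| format!("{:?}:{}", ty, v)).collect()
    }

    fn chunked_from_list(chunks: &[&[&str]], supplied: Option<ToyType>) -> Vec<Vec<String>> {
        let type_inferred = supplied.is_none();
        let ty = supplied.unwrap_or_else(|| infer_type(chunks[0]));
        let mut out = Vec::new();
        // The first chunk keeps the inferred/supplied distinction; every
        // later chunk is built against the already-fixed type.
        out.push(array_from_chunk(chunks[0], &ty, type_inferred));
        for chunk in &chunks[1..] {
            out.push(array_from_chunk(chunk, &ty, false));
        }
        out
    }
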
diff --git a/r/src/arrow_types.h b/r/src/arrow_types.h
index e40cb6e..4db3fee 100644
--- a/r/src/arrow_types.h
+++ b/r/src/arrow_types.h
@@ -253,7 +253,7 @@ namespace r {
 Status count_fields(SEXP lst, int* out);
 
 std::shared_ptr<arrow::Array> Array__from_vector(
-    SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_infered);
+    SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_inferred);
 
 template <typename T>
 std::vector<std::shared_ptr<T>> List_to_shared_ptr_vector(SEXP x) {
diff --git a/r/src/recordbatch.cpp b/r/src/recordbatch.cpp
index 74ac481..c27c918 100644
--- a/r/src/recordbatch.cpp
+++ b/r/src/recordbatch.cpp
@@ -292,7 +292,7 @@ std::shared_ptr<arrow::RecordBatch> RecordBatch__from_arrays(SEXP schema_sxp, SE
     }
   }
 
-  // generate schema from the types that have been infered
+  // generate schema from the types that have been inferred
   std::shared_ptr<arrow::Schema> schema;
 
   std::vector<std::shared_ptr<arrow::Field>> fields(num_fields);
diff --git a/r/tests/testthat/test-Array.R b/r/tests/testthat/test-Array.R
index 2992e58..9ba7738 100644
--- a/r/tests/testthat/test-Array.R
+++ b/r/tests/testthat/test-Array.R
@@ -443,7 +443,7 @@ test_that("Array$create() handles data frame -> struct arrays (ARROW-3811)", {
   expect_equivalent(a$as_vector(), df)
 })
 
-test_that("Array$create() can handle data frame with custom struct type (not infered)", {
+test_that("Array$create() can handle data frame with custom struct type (not inferred)", {
   df <- tibble::tibble(x = 1:10, y = 1:10)
   type <- struct(x = float64(), y = int16())
   a <- Array$create(df, type = type)
diff --git a/ruby/red-arrow/lib/arrow/field.rb b/ruby/red-arrow/lib/arrow/field.rb
index 599ff30..e439cb9 100644
--- a/ruby/red-arrow/lib/arrow/field.rb
+++ b/ruby/red-arrow/lib/arrow/field.rb
@@ -59,7 +59,7 @@ module Arrow
     #     There is a shortcut for convenience. If field description
     #     doesn't have `:data_type`, all keys except `:name` are
     #     processes as data type description. For example, the
-    #     following field descrptions are the same:
+    #     following field descriptions are the same:
     #
     #     ```ruby
     #     {name: "visible", data_type: {type: :boolean}}
diff --git a/rust/arrow/src/array/array.rs b/rust/arrow/src/array/array.rs
index fdf30c9..cf4ad3b 100644
--- a/rust/arrow/src/array/array.rs
+++ b/rust/arrow/src/array/array.rs
@@ -811,7 +811,7 @@ pub struct ListArray {
 }
 
 impl ListArray {
-    /// Returns an reference to the values of this list.
+    /// Returns a reference to the values of this list.
     pub fn values(&self) -> ArrayRef {
         self.values.clone()
     }
@@ -946,7 +946,7 @@ pub struct FixedSizeListArray {
 }
 
 impl FixedSizeListArray {
-    /// Returns an reference to the values of this list.
+    /// Returns a reference to the values of this list.
     pub fn values(&self) -> ArrayRef {
         self.values.clone()
     }
@@ -1745,7 +1745,7 @@ mod tests {
     #[test]
     fn test_time64_nanosecond_array_from_vec() {
         // Test building a primitive array with null values
-        // we use Int32 and Int64 as a backing array, so all Int32 and Int64 convensions
+        // we use Int32 and Int64 as a backing array, so all Int32 and Int64 conventions
         // work
 
         // 1e6:        00:00:00.001
@@ -1942,7 +1942,7 @@ mod tests {
 
     #[test]
     fn test_primitive_array_builder() {
-        // Test building an primitive array with ArrayData builder and offset
+        // Test building a primitive array with ArrayData builder and offset
         let buf = Buffer::from(&[0, 1, 2, 3, 4].to_byte_slice());
         let buf2 = buf.clone();
         let data = ArrayData::builder(DataType::Int32)
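
Both `values()` accessors above hand back a clone of the `ArrayRef` for the
flat child array underneath the list. A minimal usage sketch, assuming the
arrow-crate APIs visible elsewhere in this diff (the `ArrayData` builder,
buffers, and the `DataType::List(Box<DataType>)` shape of this era):

    use arrow::array::{Array, ArrayData, ListArray};
    use arrow::buffer::Buffer;
    use arrow::datatypes::{DataType, ToByteSlice};

    fn main() {
        // Flat child values [0..8) plus offsets carving them into
        // [0,3), [3,6), [6,8) -- i.e. a 3-element list array.
        let value_data = ArrayData::builder(DataType::Int32)
            .len(8)
            .add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
            .build();
        let value_offsets = Buffer::from(&[0, 3, 6, 8].to_byte_slice());
        let list_data = ArrayData::builder(DataType::List(Box::new(DataType::Int32)))
            .len(3)
            .add_buffer(value_offsets)
            .add_child_data(value_data)
            .build();
        let list_array = ListArray::from(list_data);

        assert_eq!(list_array.len(), 3);
        // `values()` returns a (cloned) ArrayRef to the flat child array.
        assert_eq!(list_array.values().len(), 8);
    }
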
diff --git a/rust/arrow/src/array/builder.rs b/rust/arrow/src/array/builder.rs
index 868ab44..dc751e0 100644
--- a/rust/arrow/src/array/builder.rs
+++ b/rust/arrow/src/array/builder.rs
@@ -215,14 +215,14 @@ pub trait ArrayBuilder: Any {
     /// Builds the array
     fn finish(&mut self) -> ArrayRef;
 
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     ///
     /// This is most useful when one wants to call non-mutable APIs on a specific builder
     /// type. In this case, one can first cast this into a `Any`, and then use
     /// `downcast_ref` to get a reference on the specific builder.
     fn as_any(&self) -> &Any;
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     ///
     /// This is most useful when one wants to call mutable APIs on a specific builder
     /// type. In this case, one can first cast this into a `Any`, and then use
@@ -240,12 +240,12 @@ pub struct PrimitiveBuilder<T: ArrowPrimitiveType> {
 }
 
 impl<T: ArrowPrimitiveType> ArrayBuilder for PrimitiveBuilder<T> {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -354,12 +354,12 @@ impl<T: ArrayBuilder> ArrayBuilder for ListBuilder<T>
 where
     T: 'static,
 {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -455,12 +455,12 @@ impl<T: ArrayBuilder> ArrayBuilder for FixedSizeListBuilder<T>
 where
     T: 'static,
 {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -559,12 +559,12 @@ impl BinaryArrayBuilder for StringBuilder {}
 impl BinaryArrayBuilder for FixedSizeBinaryBuilder {}
 
 impl ArrayBuilder for BinaryBuilder {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -586,12 +586,12 @@ impl ArrayBuilder for BinaryBuilder {
 }
 
 impl ArrayBuilder for StringBuilder {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -613,12 +613,12 @@ impl ArrayBuilder for StringBuilder {
 }
 
 impl ArrayBuilder for FixedSizeBinaryBuilder {
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     fn as_any(&self) -> &Any {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     fn as_any_mut(&mut self) -> &mut Any {
         self
     }
@@ -784,7 +784,7 @@ impl ArrayBuilder for StructBuilder {
         Arc::new(self.finish())
     }
 
-    /// Returns the builder as an non-mutable `Any` reference.
+    /// Returns the builder as a non-mutable `Any` reference.
     ///
     /// This is most useful when one wants to call non-mutable APIs on a specific builder
     /// type. In this case, one can first cast this into a `Any`, and then use
@@ -793,7 +793,7 @@ impl ArrayBuilder for StructBuilder {
         self
     }
 
-    /// Returns the builder as an mutable `Any` reference.
+    /// Returns the builder as a mutable `Any` reference.
     ///
     /// This is most useful when one wants to call mutable APIs on a specific builder
     /// type. In this case, one can first cast this into a `Any`, and then use
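
Every hunk in this file is the same doc-comment fix, and the pattern those
docs describe is worth seeing once in full: erase the builder type behind
`ArrayBuilder`, then recover it through `Any`. A minimal sketch, assuming the
builder API shown above (`append_answer` is a hypothetical helper):

    use arrow::array::{Array, ArrayBuilder, Int64Builder};

    // Append a value only if the type-erased builder is an Int64Builder.
    fn append_answer(builder: &mut dyn ArrayBuilder) {
        // `as_any_mut()` yields `&mut Any`; `downcast_mut` recovers the
        // concrete builder type, as the doc comments above describe.
        if let Some(int_builder) = builder.as_any_mut().downcast_mut::<Int64Builder>() {
            int_builder.append_value(42).unwrap();
        }
    }

    fn main() {
        let mut builder = Int64Builder::new(10);
        append_answer(&mut builder);
        assert_eq!(builder.finish().len(), 1);
    }
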
diff --git a/rust/arrow/src/ipc/gen/SparseTensor.rs b/rust/arrow/src/ipc/gen/SparseTensor.rs
index 1705901..8c04c6c 100644
--- a/rust/arrow/src/ipc/gen/SparseTensor.rs
+++ b/rust/arrow/src/ipc/gen/SparseTensor.rs
@@ -89,7 +89,7 @@ pub enum SparseTensorIndexCOOOffset {}
 
 /// ----------------------------------------------------------------------
 /// EXPERIMENTAL: Data structures for sparse tensors
-/// Coodinate (COO) format of sparse tensor index.
+/// Coordinate (COO) format of sparse tensor index.
 ///
 /// COO's index list are represented as a NxM matrix,
 /// where N is the number of non-zero values,
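
The corrected doc comment describes the COO layout: for a tensor with N
non-zero values and rank M, the index list is an N x M matrix. A small worked
illustration in plain Rust (not the generated flatbuffers API):

    fn main() {
        // Dense 2-D tensor (rank M = 2):
        //   [[0, 5, 0],
        //    [7, 0, 9]]
        // N = 3 non-zero values, so the COO index matrix is 3 x 2.
        let values = [5, 7, 9];
        let indices = [
            [0usize, 1], // 5 at row 0, col 1
            [1, 0],      // 7 at row 1, col 0
            [1, 2],      // 9 at row 1, col 2
        ];
        assert_eq!(values.len(), indices.len());
    }
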
diff --git a/rust/arrow/src/ipc/reader.rs b/rust/arrow/src/ipc/reader.rs
index 511a982..db28455 100644
--- a/rust/arrow/src/ipc/reader.rs
+++ b/rust/arrow/src/ipc/reader.rs
@@ -310,7 +310,7 @@ fn create_primitive_array(
     make_array(array_data)
 }
 
-/// Reads the correct number of buffers based on list type an null_count, and creates a
+/// Reads the correct number of buffers based on list type and null_count, and creates a
 /// list array ref
 fn create_list_array(
     field_node: &ipc::FieldNode,
diff --git a/rust/arrow/src/json/reader.rs b/rust/arrow/src/json/reader.rs
index c2b6464..78bb6a9 100644
--- a/rust/arrow/src/json/reader.rs
+++ b/rust/arrow/src/json/reader.rs
@@ -285,7 +285,7 @@ fn infer_json_schema(file: File, max_read_records: Option<usize>) -> Result<Arc<
                                 Ok(())
                             }
                             Value::Object(_) => Err(ArrowError::JsonError(
-                                "Reading nested JSON structes currently not supported"
+                                "Reading nested JSON structs currently not supported"
                                     .to_string(),
                             )),
                         }
@@ -1047,7 +1047,7 @@ mod tests {
             List(Box::new(Int64)),
             coerce_data_type(vec![&Int64, &List(Box::new(Int64))]).unwrap()
         );
-        // boolean an number are incompatible, return utf8
+        // boolean and number are incompatible, return utf8
         assert_eq!(
             List(Box::new(Utf8)),
             coerce_data_type(vec![&Boolean, &List(Box::new(Float64))]).unwrap()
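
The assertions above pin down the coercion rules for inferred JSON types:
identical types stay put, a scalar next to a list lifts into the list, and
incompatible kinds such as boolean vs. number fall back to Utf8. A standalone
sketch of that rule over a toy type enum (not the crate's `coerce_data_type`):

    #[derive(Clone, Debug, PartialEq)]
    enum Toy { Boolean, Int64, Float64, Utf8, List(Box<Toy>) }

    fn coerce(a: &Toy, b: &Toy) -> Toy {
        use Toy::*;
        match (a, b) {
            (x, y) if x == y => x.clone(),
            // Numbers widen toward floats.
            (Int64, Float64) | (Float64, Int64) => Float64,
            // A scalar next to a list lifts into the list element type.
            (List(inner), other) | (other, List(inner)) => List(Box::new(coerce(inner, other))),
            // Incompatible kinds (e.g. Boolean vs. Float64): fall back to Utf8.
            _ => Utf8,
        }
    }

    fn main() {
        use Toy::*;
        assert_eq!(coerce(&Int64, &List(Box::new(Int64))), List(Box::new(Int64)));
        assert_eq!(coerce(&Boolean, &List(Box::new(Float64))), List(Box::new(Utf8)));
    }
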
diff --git a/rust/datafusion/src/execution/physical_plan/mod.rs b/rust/datafusion/src/execution/physical_plan/mod.rs
index 44095f0..1b12268 100644
--- a/rust/datafusion/src/execution/physical_plan/mod.rs
+++ b/rust/datafusion/src/execution/physical_plan/mod.rs
@@ -65,7 +65,7 @@ pub trait AggregateExpr: Send + Sync {
     fn name(&self) -> String;
     /// Get the data type of this expression, given the schema of the input
     fn data_type(&self, input_schema: &Schema) -> Result<DataType>;
-    /// Evaluate the expressioon being aggregated
+    /// Evaluate the expression being aggregated
     fn evaluate_input(&self, batch: &RecordBatch) -> Result<ArrayRef>;
     /// Create an accumulator for this aggregate expression
     fn create_accumulator(&self) -> Rc<RefCell<dyn Accumulator>>;
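
The `AggregateExpr` trait above splits an aggregate into per-batch evaluation
of its input expression plus an accumulator that folds the results. A toy
implementation against a local mirror of that surface (the accumulator half is
not reproduced here; the names are illustrative, not datafusion's):

    use arrow::array::ArrayRef;
    use arrow::datatypes::{DataType, Schema};
    use arrow::record_batch::RecordBatch;

    // Local mirror of the trait surface above, minus `create_accumulator`.
    trait ToyAggregateExpr {
        fn name(&self) -> String;
        fn data_type(&self, input_schema: &Schema) -> DataType;
        fn evaluate_input(&self, batch: &RecordBatch) -> ArrayRef;
    }

    // Trivial aggregate input: expose column `i` of each batch, e.g. the
    // argument to a SUM whose accumulator would add the values up.
    struct ColumnInput { i: usize }

    impl ToyAggregateExpr for ColumnInput {
        fn name(&self) -> String { format!("col_{}", self.i) }
        fn data_type(&self, input_schema: &Schema) -> DataType {
            input_schema.field(self.i).data_type().clone()
        }
        fn evaluate_input(&self, batch: &RecordBatch) -> ArrayRef {
            batch.column(self.i).clone()
        }
    }
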
diff --git a/rust/parquet/src/column/writer.rs b/rust/parquet/src/column/writer.rs
index f499179..d8f037b 100644
--- a/rust/parquet/src/column/writer.rs
+++ b/rust/parquet/src/column/writer.rs
@@ -1426,7 +1426,7 @@ mod tests {
     #[test]
     fn test_column_writer_add_data_pages_with_dict() {
         // ARROW-5129: Test verifies that we add data page in case of dictionary encoding
-        // and no fallback occured so far.
+        // and no fallback occurred so far.
         let file = get_temp_file("test_column_writer_add_data_pages_with_dict", &[]);
         let sink = FileSink::new(&file);
         let page_writer = Box::new(SerializedPageWriter::new(sink));
diff --git a/rust/parquet/src/file/properties.rs b/rust/parquet/src/file/properties.rs
index 54f093d..7ebf693 100644
--- a/rust/parquet/src/file/properties.rs
+++ b/rust/parquet/src/file/properties.rs
@@ -252,7 +252,7 @@ impl WriterPropertiesBuilder {
     }
 
     // ----------------------------------------------------------------------
-    // Writer properies related to a file
+    // Writer properties related to a file
 
     /// Sets writer version.
     pub fn set_writer_version(mut self, value: WriterVersion) -> Self {
@@ -299,7 +299,7 @@ impl WriterPropertiesBuilder {
     /// columns. In case when dictionary is enabled for any column, this value is
     /// considered to be a fallback encoding for that column.
     ///
-    /// Panics if user tries to set dictionary encoding here, regardless of dictinoary
+    /// Panics if user tries to set dictionary encoding here, regardless of dictionary
     /// encoding flag being set.
     pub fn set_encoding(mut self, value: Encoding) -> Self {
         self.default_column_properties.set_encoding(value);
@@ -354,7 +354,7 @@ impl WriterPropertiesBuilder {
     /// global defaults or explicitly, this value is considered to be a fallback
     /// encoding for this column.
     ///
-    /// Panics if user tries to set dictionary encoding here, regardless of dictinoary
+    /// Panics if user tries to set dictionary encoding here, regardless of dictionary
     /// encoding flag being set.
     pub fn set_column_encoding(mut self, col: ColumnPath, value: Encoding) -> Self {
         self.get_mut_props(col).set_encoding(value);
@@ -425,7 +425,7 @@ impl ColumnProperties {
     /// In case when dictionary is enabled for a column, this value is considered to
     /// be a fallback encoding.
     ///
-    /// Panics if user tries to set dictionary encoding here, regardless of dictinoary
+    /// Panics if user tries to set dictionary encoding here, regardless of dictionary
     /// encoding flag being set. Use `set_dictionary_enabled` method to enable dictionary
     /// for a column.
     fn set_encoding(&mut self, value: Encoding) {
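
The thrice-corrected warning in this file marks a real split in the API:
dictionary encoding is controlled by its own flag, while `set_encoding` and
`set_column_encoding` configure the *fallback* encoding and panic if handed a
dictionary encoding. A usage sketch, assuming the builder methods shown above
(`DELTA_BINARY_PACKED` is just an example value):

    use parquet::basic::Encoding;
    use parquet::file::properties::WriterProperties;
    use parquet::schema::types::ColumnPath;

    fn main() {
        let props = WriterProperties::builder()
            // Dictionary encoding has its own switch...
            .set_dictionary_enabled(true)
            // ...while these set the fallback encodings; passing a
            // dictionary encoding to either would panic, per the docs.
            .set_encoding(Encoding::PLAIN)
            .set_column_encoding(ColumnPath::from("a.b"), Encoding::DELTA_BINARY_PACKED)
            .build();
        let _ = props;
    }
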
diff --git a/rust/parquet/src/record/api.rs b/rust/parquet/src/record/api.rs
index 1975e37..ccd66c1 100644
--- a/rust/parquet/src/record/api.rs
+++ b/rust/parquet/src/record/api.rs
@@ -285,7 +285,7 @@ impl ListAccessor for List {
     list_complex_accessor!(get_map, MapInternal, Map);
 }
 
-/// `Map` represents a map which contains an list of key->value pairs.
+/// `Map` represents a map which contains a list of key->value pairs.
 #[derive(Clone, Debug, PartialEq)]
 pub struct Map {
     entries: Vec<(Field, Field)>,
diff --git a/rust/parquet/src/util/bit_util.rs b/rust/parquet/src/util/bit_util.rs
index 4305675..70457bb 100644
--- a/rust/parquet/src/util/bit_util.rs
+++ b/rust/parquet/src/util/bit_util.rs
@@ -516,7 +516,7 @@ impl BitReader {
                     self.byte_offset += 4 * num_bits;
                     for n in 0..32 {
                         // We need to copy from smaller size to bigger size to avoid
-                        // overwritting other memory regions.
+                        // overwriting other memory regions.
                         if size_of::<T>() > size_of::<u32>() {
                             ::std::ptr::copy_nonoverlapping(
                                 out_buf[n..].as_ptr() as *const u32,
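
The corrected comment sits in `BitReader`'s batch-unpacking path, where 32-bit
chunks are unpacked into a caller-supplied buffer whose element type `T` may
be wider than `u32`; values are therefore copied one at a time, from the
smaller size into the bigger slots. A safe sketch of the same idea
(hypothetical helper, without the unsafe pointer arithmetic):

    // Widen u32 values into u64 slots one element at a time. A single bulk
    // byte copy would lay the u32s end-to-end, packing two per u64 slot and
    // overwriting bytes that belong to neighboring elements' regions.
    fn widen_copy(src: &[u32], dst: &mut [u64]) {
        for (d, s) in dst.iter_mut().zip(src.iter()) {
            *d = u64::from(*s);
        }
    }

    fn main() {
        let src = [1u32, 2, 3];
        let mut dst = [0u64; 3];
        widen_copy(&src, &mut dst);
        assert_eq!(dst, [1, 2, 3]);
    }
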