You are viewing a plain text version of this content. The canonical link for it is here.
Posted to github@arrow.apache.org by GitBox <gi...@apache.org> on 2020/08/19 18:51:56 UTC

[GitHub] [arrow] jorgecarleitao commented on a change in pull request #8009: ARROW-9790: [Rust][Parquet]: Increase test coverage in arrow_reader.rs

jorgecarleitao commented on a change in pull request #8009:
URL: https://github.com/apache/arrow/pull/8009#discussion_r473249646



##########
File path: rust/parquet/src/arrow/arrow_reader.rs
##########
@@ -348,30 +348,101 @@ mod tests {
         ";
 
         let converter = Utf8ArrayConverter {};
-        single_column_reader_test::<
+        run_single_column_reader_tests::<
             ByteArrayType,
             StringArray,
             Utf8ArrayConverter,
             RandUtf8Gen,
-        >(2, 100, 2, message_type, 15, 50, converter);
+        >(2, message_type, &converter);
     }
 
-    fn single_column_reader_test<T, A, C, G>(
+    /// Parameters for single_column_reader_test
+    #[derive(Debug)]
+    struct TestOptions {
+        /// Number of row groups to write to parquet (row group size =
+        /// num_rows / num_row_groups)
         num_row_groups: usize,
+        /// Total number of rows
         num_rows: usize,
-        rand_max: i32,
-        message_type: &str,
+        /// Size of batches to read back
         record_batch_size: usize,
+        /// Total number of batches to attempt to read.
+        /// `record_batch_size` * `num_iterations` should be greater
+        /// than `num_rows` to ensure the data can be read back completely
         num_iterations: usize,
-        converter: C,
+    }
+
+    impl TestOptions {
+        fn new(
+            num_row_groups: usize,
+            num_rows: usize,
+            record_batch_size: usize,
+            num_iterations: usize,
+        ) -> Self {
+            TestOptions {
+                num_row_groups,
+                num_rows,
+                record_batch_size,
+                num_iterations,
+            }
+        }
+    }
+
+    /// Create a parquet file and then read it using
+    /// `ParquetFileArrowReader` using a standard set of parameters
+    /// `opts`.
+    ///
+    /// `rand_max` represents the maximum size of value to pass to the
+    /// value generator
+    fn run_single_column_reader_tests<T, A, C, G>(
+        rand_max: i32,
+        message_type: &str,
+        converter: &C,
     ) where
         T: DataType,
         G: RandGen<T>,
         A: PartialEq + Array + 'static,
         C: Converter<Vec<Option<T::T>>, A> + 'static,
     {
-        let values: Vec<Vec<T::T>> = (0..num_row_groups)
-            .map(|_| G::gen_vec(rand_max, num_rows))
+        let all_options = vec![
+            TestOptions::new(2, 100, 15, 50),
+            // batch size (5) chosen so batches fall on row group
+            // boundaries (25 rows in 3 row groups --> row groups of
+            // 10, 10, and 5) to test batch refilling edge cases.
+            TestOptions::new(3, 25, 5, 50),

Review comment:
       Since you felt the need to add a comment explaining the values, a tiny suggestion is to declare all members of `TestOptions` as `pub` and use 
   
   ```
   TestOptions {
       num_row_groups: 3,
       num_rows: 25,
       record_batch_size: 5,
       num_iterations: 50,
   }
   ```
   instead, to increase readability of the values' meaning.
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org