Posted to commits@arrow.apache.org by al...@apache.org on 2023/06/08 10:38:23 UTC

[arrow-datafusion] branch main updated: make page filter public (#6523)

This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new 4973c7c238 make page filter public (#6523)
4973c7c238 is described below

commit 4973c7c238b16088f768948ddb3725765e1f205b
Author: Jiacai Liu <de...@liujiacai.net>
AuthorDate: Thu Jun 8 18:38:17 2023 +0800

    make page filter public (#6523)
    
    * make page_filter public
    
    * make parquet public
    
    * fix CI
---
 datafusion/core/src/datasource/physical_plan/mod.rs                 | 2 +-
 datafusion/core/src/datasource/physical_plan/parquet.rs             | 4 +++-
 datafusion/core/src/datasource/physical_plan/parquet/page_filter.rs | 2 +-
 3 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/datafusion/core/src/datasource/physical_plan/mod.rs b/datafusion/core/src/datasource/physical_plan/mod.rs
index fcb118569f..54b916788c 100644
--- a/datafusion/core/src/datasource/physical_plan/mod.rs
+++ b/datafusion/core/src/datasource/physical_plan/mod.rs
@@ -24,7 +24,7 @@ mod chunked_store;
 mod csv;
 mod file_stream;
 mod json;
-mod parquet;
+pub mod parquet;
 
 pub(crate) use self::csv::plan_to_csv;
 pub use self::csv::{CsvConfig, CsvExec, CsvOpener};
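
With parquet now a pub mod, code outside the datafusion crate can refer to the
module and its public items directly. A minimal import sketch of what this
enables, assuming the datafusion::datasource::physical_plan prefix implied by
the file paths in this diff (a re-export may shorten the actual path):

    // Illustrative only; not part of the patch. Compiles once `parquet`
    // is `pub mod` as in the hunk above.
    use datafusion::datasource::physical_plan::parquet;
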
diff --git a/datafusion/core/src/datasource/physical_plan/parquet.rs b/datafusion/core/src/datasource/physical_plan/parquet.rs
index 383a2066fc..48e4d49371 100644
--- a/datafusion/core/src/datasource/physical_plan/parquet.rs
+++ b/datafusion/core/src/datasource/physical_plan/parquet.rs
@@ -65,7 +65,7 @@ use parquet::file::{metadata::ParquetMetaData, properties::WriterProperties};
 use parquet::schema::types::ColumnDescriptor;
 
 mod metrics;
-mod page_filter;
+pub mod page_filter;
 mod row_filter;
 mod row_groups;
 
@@ -612,12 +612,14 @@ pub trait ParquetFileReaderFactory: Debug + Send + Sync + 'static {
     ) -> Result<Box<dyn AsyncFileReader + Send>>;
 }
 
+/// Default parquet reader factory.
 #[derive(Debug)]
 pub struct DefaultParquetFileReaderFactory {
     store: Arc<dyn ObjectStore>,
 }
 
 impl DefaultParquetFileReaderFactory {
+    /// Create a factory.
     pub fn new(store: Arc<dyn ObjectStore>) -> Self {
         Self { store }
     }
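
The new doc comments cover DefaultParquetFileReaderFactory and its constructor,
which takes any Arc<dyn ObjectStore>. A minimal construction sketch, not taken
from the patch, using object_store's LocalFileSystem as a stand-in store and the
module path implied by the file locations above:

    // Illustrative only; exact re-export paths may differ.
    use std::sync::Arc;

    use datafusion::datasource::physical_plan::parquet::DefaultParquetFileReaderFactory;
    use object_store::{local::LocalFileSystem, ObjectStore};

    fn reader_factory() -> DefaultParquetFileReaderFactory {
        // Any ObjectStore implementation works; LocalFileSystem is just the
        // simplest one to spell out here.
        let store: Arc<dyn ObjectStore> = Arc::new(LocalFileSystem::new());
        DefaultParquetFileReaderFactory::new(store)
    }
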
diff --git a/datafusion/core/src/datasource/physical_plan/parquet/page_filter.rs b/datafusion/core/src/datasource/physical_plan/parquet/page_filter.rs
index c046de73d7..e5c1d8feb0 100644
--- a/datafusion/core/src/datasource/physical_plan/parquet/page_filter.rs
+++ b/datafusion/core/src/datasource/physical_plan/parquet/page_filter.rs
@@ -101,7 +101,7 @@ use super::metrics::ParquetFileMetrics;
 /// So we can entirely skip rows 0->199 and 250->299 as we know they
 /// can not contain rows that match the predicate.
 #[derive(Debug)]
-pub(crate) struct PagePruningPredicate {
+pub struct PagePruningPredicate {
     predicates: Vec<PruningPredicate>,
 }
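
With page_filter now a pub mod and PagePruningPredicate itself pub, the type can
appear in signatures outside the crate. Its constructor and methods are not part
of this diff, so the sketch below leans only on what the hunk shows: the struct
is public and derives Debug. The helper function is hypothetical, and the module
path is again assumed from the file locations above.

    // Illustrative only; not part of the patch.
    use datafusion::datasource::physical_plan::parquet::page_filter::PagePruningPredicate;

    // A downstream helper that accepts the now-public type; Debug comes from
    // the #[derive(Debug)] in the hunk above.
    fn describe_page_pruning(predicate: &PagePruningPredicate) -> String {
        format!("{predicate:?}")
    }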