Posted to commits@arrow.apache.org by al...@apache.org on 2022/04/10 23:22:18 UTC
[arrow-datafusion] branch maint-7.x updated: Fix clippy on 7.x branch (#2191)
This is an automated email from the ASF dual-hosted git repository.
alamb pushed a commit to branch maint-7.x
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git
The following commit(s) were added to refs/heads/maint-7.x by this push:
new b1ef00b9f Fix clippy on 7.x branch (#2191)
b1ef00b9f is described below
commit b1ef00b9f52124040506ca5ca158c50f503a4b98
Author: Andrew Lamb <an...@nerdnetworks.org>
AuthorDate: Sun Apr 10 19:22:13 2022 -0400
Fix clippy on 7.x branch (#2191)
---
.../core/src/execution_plans/distributed_query.rs | 5 +--
.../core/src/execution_plans/shuffle_reader.rs | 4 +--
ballista/rust/scheduler/src/lib.rs | 2 +-
ballista/rust/scheduler/src/main.rs | 4 +--
datafusion/src/datasource/file_format/parquet.rs | 4 +--
datafusion/src/datasource/mod.rs | 4 +--
datafusion/src/logical_plan/extension.rs | 1 +
datafusion/src/physical_plan/hash_utils.rs | 4 +--
datafusion/src/physical_plan/tdigest/mod.rs | 36 +++++++++++-----------
9 files changed, 33 insertions(+), 31 deletions(-)
diff --git a/ballista/rust/core/src/execution_plans/distributed_query.rs b/ballista/rust/core/src/execution_plans/distributed_query.rs
index d6b3c3da9..d53df94ae 100644
--- a/ballista/rust/core/src/execution_plans/distributed_query.rs
+++ b/ballista/rust/core/src/execution_plans/distributed_query.rs
@@ -289,7 +289,8 @@ async fn fetch_partition(
BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
.await
.map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
- Ok(ballista_client
+
+ ballista_client
.fetch_partition(
&partition_id.job_id,
partition_id.stage_id as usize,
@@ -297,5 +298,5 @@ async fn fetch_partition(
&location.path,
)
.await
- .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+ .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
}
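
Note: the hunk above drops an `Ok(..?)` wrapper around an expression that is already a `Result` of the right type, which is likely the pattern flagged by clippy's `needless_question_mark` lint. A minimal standalone sketch of the same before/after shape (the `parse` helper below is hypothetical and not part of this repository):

    fn parse(s: &str) -> Result<i32, std::num::ParseIntError> {
        // Before: Ok(s.trim().parse::<i32>()?)  // unwraps with `?` only to re-wrap in Ok(..)
        // After: the expression is already the Result we want, so return it directly.
        s.trim().parse::<i32>()
    }

    fn main() {
        assert_eq!(parse(" 42 "), Ok(42));
    }

The same simplification is applied to shuffle_reader.rs and main.rs below.
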
diff --git a/ballista/rust/core/src/execution_plans/shuffle_reader.rs b/ballista/rust/core/src/execution_plans/shuffle_reader.rs
index 7482c1843..73e5aebca 100644
--- a/ballista/rust/core/src/execution_plans/shuffle_reader.rs
+++ b/ballista/rust/core/src/execution_plans/shuffle_reader.rs
@@ -212,7 +212,7 @@ async fn fetch_partition(
BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
.await
.map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
- Ok(ballista_client
+ ballista_client
.fetch_partition(
&partition_id.job_id,
partition_id.stage_id as usize,
@@ -220,7 +220,7 @@ async fn fetch_partition(
&location.path,
)
.await
- .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+ .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
}
#[cfg(test)]
diff --git a/ballista/rust/scheduler/src/lib.rs b/ballista/rust/scheduler/src/lib.rs
index 3459cce9a..92de88ae7 100644
--- a/ballista/rust/scheduler/src/lib.rs
+++ b/ballista/rust/scheduler/src/lib.rs
@@ -232,7 +232,7 @@ impl<T: 'static + AsLogicalPlan, U: 'static + AsExecutionPlan> SchedulerServer<T
async fn fetch_tasks(
&self,
- available_executors: &mut Vec<ExecutorData>,
+ available_executors: &mut [ExecutorData],
job_id: &str,
) -> Result<(Vec<Vec<TaskDefinition>>, usize), BallistaError> {
let mut ret: Vec<Vec<TaskDefinition>> =
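
Note: changing `&mut Vec<ExecutorData>` to `&mut [ExecutorData]` is likely a fix for clippy's `ptr_arg` lint, which suggests taking a mutable slice when the callee never needs to grow or shrink the vector; callers can keep passing `&mut some_vec` because `&mut Vec<T>` coerces to `&mut [T]`. A small sketch under that assumption (the `zero_out` function is illustrative only, not from this repository):

    // Accepting &mut [u64] works with a Vec, an array, or any other mutable slice,
    // while &mut Vec<u64> would only accept a Vec and triggers clippy::ptr_arg.
    fn zero_out(values: &mut [u64]) {
        for v in values.iter_mut() {
            *v = 0;
        }
    }

    fn main() {
        let mut buf = vec![1u64, 2, 3];
        zero_out(&mut buf); // &mut Vec<u64> coerces to &mut [u64]
        assert_eq!(buf, vec![0u64, 0, 0]);
    }

The parquet.rs, datasource/mod.rs, hash_utils.rs, and tdigest changes below apply the same signature adjustment.
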
diff --git a/ballista/rust/scheduler/src/main.rs b/ballista/rust/scheduler/src/main.rs
index c9066027e..cb3762720 100644
--- a/ballista/rust/scheduler/src/main.rs
+++ b/ballista/rust/scheduler/src/main.rs
@@ -106,7 +106,7 @@ async fn start_server(
),
};
- Ok(Server::bind(&addr)
+ Server::bind(&addr)
.serve(make_service_fn(move |request: &AddrStream| {
let scheduler_grpc_server =
SchedulerGrpcServer::new(scheduler_server.clone());
@@ -145,7 +145,7 @@ async fn start_server(
))
}))
.await
- .context("Could not start grpc server")?)
+ .context("Could not start grpc server")
}
#[tokio::main]
diff --git a/datafusion/src/datasource/file_format/parquet.rs b/datafusion/src/datasource/file_format/parquet.rs
index 4afb2f54c..d1d26e2c6 100644
--- a/datafusion/src/datasource/file_format/parquet.rs
+++ b/datafusion/src/datasource/file_format/parquet.rs
@@ -122,8 +122,8 @@ impl FileFormat for ParquetFormat {
}
fn summarize_min_max(
- max_values: &mut Vec<Option<MaxAccumulator>>,
- min_values: &mut Vec<Option<MinAccumulator>>,
+ max_values: &mut [Option<MaxAccumulator>],
+ min_values: &mut [Option<MinAccumulator>],
fields: &[Field],
i: usize,
stat: &ParquetStatistics,
diff --git a/datafusion/src/datasource/mod.rs b/datafusion/src/datasource/mod.rs
index 33512b40c..9a7b17d1a 100644
--- a/datafusion/src/datasource/mod.rs
+++ b/datafusion/src/datasource/mod.rs
@@ -177,8 +177,8 @@ fn create_max_min_accs(
fn get_col_stats(
schema: &Schema,
null_counts: Vec<usize>,
- max_values: &mut Vec<Option<MaxAccumulator>>,
- min_values: &mut Vec<Option<MinAccumulator>>,
+ max_values: &mut [Option<MaxAccumulator>],
+ min_values: &mut [Option<MinAccumulator>],
) -> Vec<ColumnStatistics> {
(0..schema.fields().len())
.map(|i| {
diff --git a/datafusion/src/logical_plan/extension.rs b/datafusion/src/logical_plan/extension.rs
index 43bf96ffb..ee19ad43e 100644
--- a/datafusion/src/logical_plan/extension.rs
+++ b/datafusion/src/logical_plan/extension.rs
@@ -71,6 +71,7 @@ pub trait UserDefinedLogicalNode: fmt::Debug {
/// of self.inputs and self.exprs.
///
/// So, `self.from_template(exprs, ..).expressions() == exprs
+ #[allow(clippy::wrong_self_convention)]
fn from_template(
&self,
exprs: &[Expr],
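
Note: `clippy::wrong_self_convention` warns when a method whose name starts with `from_` takes `self`, since such names conventionally belong to constructors. Because `from_template` is an established trait method here, the commit suppresses the lint rather than renaming it. A minimal illustration of the same allow (the `Plan` type is hypothetical, not from this repository):

    struct Plan;

    impl Plan {
        // Without the attribute, clippy warns that a `from_*` method
        // should not take `&self`; allowing the lint keeps the existing name.
        #[allow(clippy::wrong_self_convention)]
        fn from_template(&self) -> Plan {
            Plan
        }
    }

    fn main() {
        let _p = Plan.from_template();
    }
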
diff --git a/datafusion/src/physical_plan/hash_utils.rs b/datafusion/src/physical_plan/hash_utils.rs
index 27a5376cf..00073a659 100644
--- a/datafusion/src/physical_plan/hash_utils.rs
+++ b/datafusion/src/physical_plan/hash_utils.rs
@@ -42,7 +42,7 @@ fn combine_hashes(l: u64, r: u64) -> u64 {
fn hash_decimal128<'a>(
array: &ArrayRef,
random_state: &RandomState,
- hashes_buffer: &'a mut Vec<u64>,
+ hashes_buffer: &'a mut [u64],
mul_col: bool,
) {
let array = array.as_any().downcast_ref::<DecimalArray>().unwrap();
@@ -207,7 +207,7 @@ macro_rules! hash_array_float {
fn create_hashes_dictionary<K: ArrowDictionaryKeyType>(
array: &ArrayRef,
random_state: &RandomState,
- hashes_buffer: &mut Vec<u64>,
+ hashes_buffer: &mut [u64],
multi_col: bool,
) -> Result<()> {
let dict_array = array.as_any().downcast_ref::<DictionaryArray<K>>().unwrap();
diff --git a/datafusion/src/physical_plan/tdigest/mod.rs b/datafusion/src/physical_plan/tdigest/mod.rs
index 5bd8b9e35..4268bcf03 100644
--- a/datafusion/src/physical_plan/tdigest/mod.rs
+++ b/datafusion/src/physical_plan/tdigest/mod.rs
@@ -359,7 +359,7 @@ impl TDigest {
}
fn external_merge(
- centroids: &mut Vec<Centroid>,
+ centroids: &mut [Centroid],
first: usize,
middle: usize,
last: usize,
@@ -582,23 +582,23 @@ impl TDigest {
/// ┌────────┬────────┬────────┬───────┬────────┬────────┐
/// │max_size│ sum │ count │ max │ min │centroid│
/// └────────┴────────┴────────┴───────┴────────┴────────┘
- /// │
- /// ┌─────────────────────┘
- /// ▼
- /// ┌ List ───┐
- /// │┌ ─ ─ ─ ┐│
- /// │ mean │
- /// │├ ─ ─ ─ ┼│─ ─ Centroid 1
- /// │ weight │
- /// │└ ─ ─ ─ ┘│
- /// │ │
- /// │┌ ─ ─ ─ ┐│
- /// │ mean │
- /// │├ ─ ─ ─ ┼│─ ─ Centroid 2
- /// │ weight │
- /// │└ ─ ─ ─ ┘│
- /// │ │
- /// ...
+ /// │
+ /// ┌─────────────────────┘
+ /// ▼
+ /// ┌ List ───┐
+ /// │┌ ─ ─ ─ ┐│
+ /// │ mean │
+ /// │├ ─ ─ ─ ┼│─ ─ Centroid 1
+ /// │ weight │
+ /// │└ ─ ─ ─ ┘│
+ /// │ │
+ /// │┌ ─ ─ ─ ┐│
+ /// │ mean │
+ /// │├ ─ ─ ─ ┼│─ ─ Centroid 2
+ /// │ weight │
+ /// │└ ─ ─ ─ ┘│
+ /// │ │
+ /// ...
///
/// ```
///