You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@arrow.apache.org by tu...@apache.org on 2022/07/04 11:01:18 UTC
[arrow-rs] branch master updated: Implements Into&lt;ArrayData&gt; for T: Array (#1992)
This is an automated email from the ASF dual-hosted git repository.
tustvold pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git
The following commit(s) were added to refs/heads/master by this push:
new e4360412b Implements Into<ArrayData> for T: Array (#1992)
e4360412b is described below
commit e4360412bd5d06b7b0a92e063bc79411e2de9e38
Author: Rutvik Patel <he...@gmail.com>
AuthorDate: Mon Jul 4 16:31:13 2022 +0530
Implements Into<ArrayData> for T: Array (#1992)
* impl Into&lt;ArrayData&gt; for Array types
* removes redundant methods
---
arrow/src/array/array.rs | 15 +++++++++-
arrow/src/array/array_binary.rs | 20 ++++++++++++++
arrow/src/array/array_boolean.rs | 10 +++++++
arrow/src/array/array_decimal.rs | 4 +++
arrow/src/array/array_dictionary.rs | 10 +++++++
arrow/src/array/array_list.rs | 24 +++++++++++++++-
arrow/src/array/array_map.rs | 22 +++++++++++----
arrow/src/array/array_primitive.rs | 13 ++++-----
arrow/src/array/array_string.rs | 15 +++++-----
arrow/src/array/array_struct.rs | 14 ++++++++--
arrow/src/array/array_union.rs | 12 +++++++-
arrow/src/array/builder/map_builder.rs | 2 +-
.../array/builder/primitive_dictionary_builder.rs | 2 +-
arrow/src/array/builder/struct_builder.rs | 2 +-
arrow/src/array/data.rs | 32 +++++++++++-----------
arrow/src/array/equal/mod.rs | 12 ++++----
arrow/src/array/null.rs | 10 +++++++
arrow/src/array/transform/mod.rs | 10 +++----
arrow/src/compute/kernels/cast.rs | 6 ++--
arrow/src/compute/kernels/take.rs | 4 +--
arrow/src/compute/util.rs | 4 +--
arrow/src/ffi.rs | 24 ++++++++--------
arrow/src/ipc/reader.rs | 22 +++++++--------
arrow/src/json/reader.rs | 20 ++++++--------
arrow/src/json/writer.rs | 8 +++---
arrow/src/record_batch.rs | 6 ++--
arrow/src/util/integration_util.rs | 2 +-
integration-testing/src/lib.rs | 12 ++++----
parquet/src/arrow/array_reader/list_array.rs | 2 +-
parquet/src/arrow/array_reader/map_array.rs | 6 ++--
parquet/src/arrow/arrow_writer/levels.rs | 22 +++++++--------
parquet/src/arrow/arrow_writer/mod.rs | 26 +++++++++---------
parquet/src/arrow/buffer/dictionary_buffer.rs | 4 +--
33 files changed, 256 insertions(+), 141 deletions(-)
diff --git a/arrow/src/array/array.rs b/arrow/src/array/array.rs
index c566ff99f..f01fa5cc9 100644
--- a/arrow/src/array/array.rs
+++ b/arrow/src/array/array.rs
@@ -60,6 +60,9 @@ pub trait Array: fmt::Debug + Send + Sync + JsonEqual {
/// Returns a reference to the underlying data of this array.
fn data(&self) -> &ArrayData;
+ /// Returns the underlying data of this array.
+ fn into_data(self) -> ArrayData;
+
/// Returns a reference-counted pointer to the underlying data of this array.
fn data_ref(&self) -> &ArrayData {
self.data()
@@ -237,6 +240,10 @@ impl Array for ArrayRef {
self.as_ref().data()
}
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
+
fn data_ref(&self) -> &ArrayData {
self.as_ref().data_ref()
}
@@ -407,6 +414,12 @@ impl From<ArrayData> for ArrayRef {
}
}
+impl From<ArrayRef> for ArrayData {
+ fn from(array: ArrayRef) -> Self {
+ array.data().clone()
+ }
+}
+
/// Creates a new empty array
///
/// ```
@@ -550,7 +563,7 @@ pub fn new_null_array(data_type: &DataType, length: usize) -> ArrayRef {
keys.null_buffer().cloned(),
0,
keys.buffers().into(),
- vec![new_empty_array(value.as_ref()).data().clone()],
+ vec![new_empty_array(value.as_ref()).into_data()],
)
})
}
diff --git a/arrow/src/array/array_binary.rs b/arrow/src/array/array_binary.rs
index 2738b0c8a..d9cad1cce 100644
--- a/arrow/src/array/array_binary.rs
+++ b/arrow/src/array/array_binary.rs
@@ -239,6 +239,10 @@ impl<OffsetSize: OffsetSizeTrait> Array for GenericBinaryArray<OffsetSize> {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl<OffsetSize: OffsetSizeTrait> From<ArrayData> for GenericBinaryArray<OffsetSize> {
@@ -263,6 +267,12 @@ impl<OffsetSize: OffsetSizeTrait> From<ArrayData> for GenericBinaryArray<OffsetS
}
}
+impl<OffsetSize: OffsetSizeTrait> From<GenericBinaryArray<OffsetSize>> for ArrayData {
+ fn from(array: GenericBinaryArray<OffsetSize>) -> Self {
+ array.data
+ }
+}
+
impl<Ptr, OffsetSize: OffsetSizeTrait> FromIterator<Option<Ptr>>
for GenericBinaryArray<OffsetSize>
where
@@ -669,6 +679,12 @@ impl From<ArrayData> for FixedSizeBinaryArray {
}
}
+impl From<FixedSizeBinaryArray> for ArrayData {
+ fn from(array: FixedSizeBinaryArray) -> Self {
+ array.data
+ }
+}
+
/// Creates a `FixedSizeBinaryArray` from `FixedSizeList<u8>` array
impl From<FixedSizeListArray> for FixedSizeBinaryArray {
fn from(v: FixedSizeListArray) -> Self {
@@ -724,6 +740,10 @@ impl Array for FixedSizeBinaryArray {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
#[cfg(test)]
diff --git a/arrow/src/array/array_boolean.rs b/arrow/src/array/array_boolean.rs
index f4e9ce28b..279db3253 100644
--- a/arrow/src/array/array_boolean.rs
+++ b/arrow/src/array/array_boolean.rs
@@ -151,6 +151,10 @@ impl Array for BooleanArray {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl From<Vec<bool>> for BooleanArray {
@@ -194,6 +198,12 @@ impl From<ArrayData> for BooleanArray {
}
}
+impl From<BooleanArray> for ArrayData {
+ fn from(array: BooleanArray) -> Self {
+ array.data
+ }
+}
+
impl<'a> IntoIterator for &'a BooleanArray {
type Item = Option<bool>;
type IntoIter = BooleanIter<'a>;
diff --git a/arrow/src/array/array_decimal.rs b/arrow/src/array/array_decimal.rs
index 67c1328fe..62ff7905a 100644
--- a/arrow/src/array/array_decimal.rs
+++ b/arrow/src/array/array_decimal.rs
@@ -345,6 +345,10 @@ impl Array for DecimalArray {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
#[cfg(test)]
diff --git a/arrow/src/array/array_dictionary.rs b/arrow/src/array/array_dictionary.rs
index d885a6bc0..8a7e05aac 100644
--- a/arrow/src/array/array_dictionary.rs
+++ b/arrow/src/array/array_dictionary.rs
@@ -257,6 +257,12 @@ impl<T: ArrowPrimitiveType> From<ArrayData> for DictionaryArray<T> {
}
}
+impl<T: ArrowPrimitiveType> From<DictionaryArray<T>> for ArrayData {
+ fn from(array: DictionaryArray<T>) -> Self {
+ array.data
+ }
+}
+
/// Constructs a `DictionaryArray` from an iterator of optional strings.
///
/// # Example:
@@ -343,6 +349,10 @@ impl<T: ArrowPrimitiveType> Array for DictionaryArray<T> {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl<T: ArrowPrimitiveType> fmt::Debug for DictionaryArray<T> {
diff --git a/arrow/src/array/array_list.rs b/arrow/src/array/array_list.rs
index 36ad30715..ac37754e9 100644
--- a/arrow/src/array/array_list.rs
+++ b/arrow/src/array/array_list.rs
@@ -177,7 +177,7 @@ impl<OffsetSize: OffsetSizeTrait> GenericListArray<OffsetSize> {
let array_data = ArrayData::builder(data_type)
.len(null_buf.len())
.add_buffer(offsets.into())
- .add_child_data(values.data().clone())
+ .add_child_data(values.into_data())
.null_bit_buffer(Some(null_buf.into()));
let array_data = unsafe { array_data.build_unchecked() };
@@ -193,6 +193,14 @@ impl<OffsetSize: OffsetSizeTrait> From<ArrayData> for GenericListArray<OffsetSiz
}
}
+impl<OffsetSize: 'static + OffsetSizeTrait> From<GenericListArray<OffsetSize>>
+ for ArrayData
+{
+ fn from(array: GenericListArray<OffsetSize>) -> Self {
+ array.data
+ }
+}
+
impl<OffsetSize: OffsetSizeTrait> GenericListArray<OffsetSize> {
fn try_new_from_array_data(data: ArrayData) -> Result<Self, ArrowError> {
if data.buffers().len() != 1 {
@@ -245,6 +253,10 @@ impl<OffsetSize: 'static + OffsetSizeTrait> Array for GenericListArray<OffsetSiz
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl<OffsetSize: OffsetSizeTrait> fmt::Debug for GenericListArray<OffsetSize> {
@@ -434,6 +446,12 @@ impl From<ArrayData> for FixedSizeListArray {
}
}
+impl From<FixedSizeListArray> for ArrayData {
+ fn from(array: FixedSizeListArray) -> Self {
+ array.data
+ }
+}
+
impl Array for FixedSizeListArray {
fn as_any(&self) -> &dyn Any {
self
@@ -442,6 +460,10 @@ impl Array for FixedSizeListArray {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl fmt::Debug for FixedSizeListArray {
diff --git a/arrow/src/array/array_map.rs b/arrow/src/array/array_map.rs
index 081362021..471d56c9c 100644
--- a/arrow/src/array/array_map.rs
+++ b/arrow/src/array/array_map.rs
@@ -107,6 +107,12 @@ impl From<ArrayData> for MapArray {
}
}
+impl From<MapArray> for ArrayData {
+ fn from(array: MapArray) -> Self {
+ array.data
+ }
+}
+
impl MapArray {
fn try_new_from_array_data(data: ArrayData) -> Result<Self, ArrowError> {
if data.buffers().len() != 1 {
@@ -188,7 +194,7 @@ impl MapArray {
let map_data = ArrayData::builder(map_data_type)
.len(entry_offsets.len() - 1)
.add_buffer(entry_offsets_buffer)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()?;
Ok(MapArray::from(map_data))
@@ -204,6 +210,10 @@ impl Array for MapArray {
&self.data
}
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
+
/// Returns the total number of bytes of memory occupied by the buffers owned by this [MapArray].
fn get_buffer_memory_size(&self) -> usize {
self.data.get_buffer_memory_size()
@@ -277,7 +287,7 @@ mod tests {
let map_data = ArrayData::builder(map_data_type)
.len(3)
.add_buffer(entry_offsets)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()
.unwrap();
MapArray::from(map_data)
@@ -323,7 +333,7 @@ mod tests {
let map_data = ArrayData::builder(map_data_type)
.len(3)
.add_buffer(entry_offsets)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()
.unwrap();
let map_array = MapArray::from(map_data);
@@ -345,7 +355,7 @@ mod tests {
]);
assert_eq!(
struct_array,
- StructArray::from(map_array.value(0).data().clone())
+ StructArray::from(map_array.value(0).into_data())
);
assert_eq!(
&struct_array,
@@ -454,7 +464,7 @@ mod tests {
let expected_map_data = ArrayData::builder(map_data_type)
.len(2)
.add_buffer(entry_offsets)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()
.unwrap();
let expected_map_array = MapArray::from(expected_map_data);
@@ -505,7 +515,7 @@ mod tests {
StructArray::from(vec![(keys_field, key_array), (values_field, value_array)]);
assert_eq!(
struct_array,
- StructArray::from(map_array.value(0).data().clone())
+ StructArray::from(map_array.value(0).into_data())
);
assert_eq!(
&struct_array,
diff --git a/arrow/src/array/array_primitive.rs b/arrow/src/array/array_primitive.rs
index d36caaca8..efac5a60c 100644
--- a/arrow/src/array/array_primitive.rs
+++ b/arrow/src/array/array_primitive.rs
@@ -166,16 +166,11 @@ impl<T: ArrowPrimitiveType> PrimitiveArray<T> {
) -> impl Iterator<Item = Option<T::Native>> + 'a {
indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index)))
}
-
- /// Returns the backing [`ArrayData`] of this [`PrimitiveArray`]
- pub fn into_data(self) -> ArrayData {
- self.into()
- }
}
impl<T: ArrowPrimitiveType> From<PrimitiveArray<T>> for ArrayData {
- fn from(a: PrimitiveArray<T>) -> Self {
- a.data
+ fn from(array: PrimitiveArray<T>) -> Self {
+ array.data
}
}
@@ -187,6 +182,10 @@ impl<T: ArrowPrimitiveType> Array for PrimitiveArray<T> {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
fn as_datetime<T: ArrowPrimitiveType>(v: i64) -> Option<NaiveDateTime> {
diff --git a/arrow/src/array/array_string.rs b/arrow/src/array/array_string.rs
index d00758293..b48f058cf 100644
--- a/arrow/src/array/array_string.rs
+++ b/arrow/src/array/array_string.rs
@@ -195,11 +195,6 @@ impl<OffsetSize: OffsetSizeTrait> GenericStringArray<OffsetSize> {
) -> impl Iterator<Item = Option<&str>> + 'a {
indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index)))
}
-
- /// Returns the backing [`ArrayData`] of this [`GenericStringArray`]
- pub fn into_data(self) -> ArrayData {
- self.into()
- }
}
impl<'a, Ptr, OffsetSize: OffsetSizeTrait> FromIterator<&'a Option<Ptr>>
@@ -297,6 +292,10 @@ impl<OffsetSize: OffsetSizeTrait> Array for GenericStringArray<OffsetSize> {
fn data(&self) -> &ArrayData {
&self.data
}
+
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
}
impl<OffsetSize: OffsetSizeTrait> From<ArrayData> for GenericStringArray<OffsetSize> {
@@ -342,8 +341,8 @@ impl<OffsetSize: OffsetSizeTrait> From<Vec<String>> for GenericStringArray<Offse
}
impl<OffsetSize: OffsetSizeTrait> From<GenericStringArray<OffsetSize>> for ArrayData {
- fn from(a: GenericStringArray<OffsetSize>) -> Self {
- a.data
+ fn from(array: GenericStringArray<OffsetSize>) -> Self {
+ array.data
}
}
@@ -413,7 +412,7 @@ mod tests {
#[should_panic(expected = "[Large]StringArray expects Datatype::[Large]Utf8")]
fn test_string_array_from_int() {
let array = LargeStringArray::from(vec!["a", "b"]);
- drop(StringArray::from(array.data().clone()));
+ drop(StringArray::from(array.into_data()));
}
#[test]
diff --git a/arrow/src/array/array_struct.rs b/arrow/src/array/array_struct.rs
index 91c77c72b..a6c3146ae 100644
--- a/arrow/src/array/array_struct.rs
+++ b/arrow/src/array/array_struct.rs
@@ -118,6 +118,12 @@ impl From<ArrayData> for StructArray {
}
}
+impl From<StructArray> for ArrayData {
+ fn from(array: StructArray) -> Self {
+ array.data
+ }
+}
+
impl TryFrom<Vec<(&str, ArrayRef)>> for StructArray {
type Error = ArrowError;
@@ -196,6 +202,10 @@ impl Array for StructArray {
&self.data
}
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
+
/// Returns the length (i.e., number of elements) of this array
fn len(&self) -> usize {
self.data_ref().len()
@@ -222,7 +232,7 @@ impl From<Vec<(Field, ArrayRef)>> for StructArray {
}
let array_data = ArrayData::builder(DataType::Struct(field_types))
- .child_data(field_values.into_iter().map(|a| a.data().clone()).collect())
+ .child_data(field_values.into_iter().map(|a| a.into_data()).collect())
.len(length);
let array_data = unsafe { array_data.build_unchecked() };
Self::from(array_data)
@@ -269,7 +279,7 @@ impl From<(Vec<(Field, ArrayRef)>, Buffer)> for StructArray {
let array_data = ArrayData::builder(DataType::Struct(field_types))
.null_bit_buffer(Some(pair.1))
- .child_data(field_values.into_iter().map(|a| a.data().clone()).collect())
+ .child_data(field_values.into_iter().map(|a| a.into_data()).collect())
.len(length);
let array_data = unsafe { array_data.build_unchecked() };
Self::from(array_data)
diff --git a/arrow/src/array/array_union.rs b/arrow/src/array/array_union.rs
index 4ff0a31c6..639b82ae9 100644
--- a/arrow/src/array/array_union.rs
+++ b/arrow/src/array/array_union.rs
@@ -158,7 +158,7 @@ impl UnionArray {
mode,
))
.add_buffer(type_ids)
- .child_data(field_values.into_iter().map(|a| a.data().clone()).collect())
+ .child_data(field_values.into_iter().map(|a| a.into_data()).collect())
.len(len);
let data = match value_offsets {
@@ -303,6 +303,12 @@ impl From<ArrayData> for UnionArray {
}
}
+impl From<UnionArray> for ArrayData {
+ fn from(array: UnionArray) -> Self {
+ array.data
+ }
+}
+
impl Array for UnionArray {
fn as_any(&self) -> &dyn Any {
self
@@ -312,6 +318,10 @@ impl Array for UnionArray {
&self.data
}
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
+
/// Union types always return non null as there is no validity buffer.
/// To check validity correctly you must check the underlying vector.
fn is_null(&self, _index: usize) -> bool {
diff --git a/arrow/src/array/builder/map_builder.rs b/arrow/src/array/builder/map_builder.rs
index 30ea9ad1b..7c3021897 100644
--- a/arrow/src/array/builder/map_builder.rs
+++ b/arrow/src/array/builder/map_builder.rs
@@ -153,7 +153,7 @@ impl<K: ArrayBuilder, V: ArrayBuilder> MapBuilder<K, V> {
let array_data = ArrayData::builder(DataType::Map(map_field, false)) // TODO: support sorted keys
.len(len)
.add_buffer(offset_buffer)
- .add_child_data(struct_array.data().clone())
+ .add_child_data(struct_array.into_data())
.null_bit_buffer(Some(null_bit_buffer));
let array_data = unsafe { array_data.build_unchecked() };
diff --git a/arrow/src/array/builder/primitive_dictionary_builder.rs b/arrow/src/array/builder/primitive_dictionary_builder.rs
index 5e989f3cc..5cbd81720 100644
--- a/arrow/src/array/builder/primitive_dictionary_builder.rs
+++ b/arrow/src/array/builder/primitive_dictionary_builder.rs
@@ -19,7 +19,7 @@ use std::any::Any;
use std::collections::HashMap;
use std::sync::Arc;
-use crate::array::{ArrayRef, ArrowPrimitiveType, DictionaryArray};
+use crate::array::{Array, ArrayRef, ArrowPrimitiveType, DictionaryArray};
use crate::datatypes::{ArrowNativeType, DataType, ToByteSlice};
use crate::error::{ArrowError, Result};
diff --git a/arrow/src/array/builder/struct_builder.rs b/arrow/src/array/builder/struct_builder.rs
index e69844b71..206eb17c2 100644
--- a/arrow/src/array/builder/struct_builder.rs
+++ b/arrow/src/array/builder/struct_builder.rs
@@ -218,7 +218,7 @@ impl StructBuilder {
let mut child_data = Vec::with_capacity(self.field_builders.len());
for f in &mut self.field_builders {
let arr = f.finish();
- child_data.push(arr.data().clone());
+ child_data.push(arr.into_data());
}
let null_bit_buffer = self.bitmap_builder.finish();
diff --git a/arrow/src/array/data.rs b/arrow/src/array/data.rs
index c1d608228..ce2693395 100644
--- a/arrow/src/array/data.rs
+++ b/arrow/src/array/data.rs
@@ -1797,7 +1797,7 @@ mod tests {
Box::new(DataType::Int32),
Box::new(DataType::LargeUtf8),
);
- let child_data = string_array.data().clone();
+ let child_data = string_array.into_data();
ArrayData::try_new(data_type, 1, None, 0, vec![i32_buffer], vec![child_data])
.unwrap();
}
@@ -2055,7 +2055,7 @@ mod tests {
None,
0,
vec![],
- vec![child_array.data().clone()],
+ vec![child_array.into_data()],
)
.unwrap();
}
@@ -2074,7 +2074,7 @@ mod tests {
None,
0,
vec![],
- vec![field1.data().clone()],
+ vec![field1.into_data()],
)
.unwrap();
}
@@ -2095,7 +2095,7 @@ mod tests {
None,
0,
vec![],
- vec![field1.data().clone()],
+ vec![field1.into_data()],
)
.unwrap();
}
@@ -2260,7 +2260,7 @@ mod tests {
None,
0,
vec![keys.data().buffers[0].clone()],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2286,7 +2286,7 @@ mod tests {
None,
0,
vec![keys.data().buffers[0].clone()],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2311,7 +2311,7 @@ mod tests {
None,
0,
vec![keys.data().buffers[0].clone()],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2337,7 +2337,7 @@ mod tests {
None,
0,
vec![keys.data().buffers[0].clone()],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2360,7 +2360,7 @@ mod tests {
None,
0,
vec![offsets_buffer],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2404,7 +2404,7 @@ mod tests {
None,
0,
vec![offsets_buffer],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
.unwrap();
}
@@ -2434,7 +2434,7 @@ mod tests {
None,
0,
vec![keys.data().buffers[0].clone()],
- vec![values.data().clone()],
+ vec![values.into_data()],
)
};
@@ -2477,7 +2477,7 @@ mod tests {
None,
0,
vec![type_ids],
- vec![field1.data().clone(), field2.data().clone()],
+ vec![field1.into_data(), field2.into_data()],
)
.unwrap();
}
@@ -2508,7 +2508,7 @@ mod tests {
None,
0,
vec![type_ids],
- vec![field1.data().clone(), field2.data().clone()],
+ vec![field1.into_data(), field2.into_data()],
)
.unwrap();
}
@@ -2535,7 +2535,7 @@ mod tests {
None,
0,
vec![type_ids], // need offsets buffer here too
- vec![field1.data().clone(), field2.data().clone()],
+ vec![field1.into_data(), field2.into_data()],
)
.unwrap();
}
@@ -2565,7 +2565,7 @@ mod tests {
None,
0,
vec![type_ids, offsets],
- vec![field1.data().clone(), field2.data().clone()],
+ vec![field1.into_data(), field2.into_data()],
)
.unwrap();
}
@@ -2790,7 +2790,7 @@ mod tests {
None,
0,
vec![offsets],
- vec![values_sliced.data().clone()],
+ vec![values_sliced.into_data()],
)
};
diff --git a/arrow/src/array/equal/mod.rs b/arrow/src/array/equal/mod.rs
index c3b0bbc95..74599c2ed 100644
--- a/arrow/src/array/equal/mod.rs
+++ b/arrow/src/array/equal/mod.rs
@@ -616,7 +616,7 @@ mod tests {
builder.append(false).unwrap()
}
}
- builder.finish().data().clone()
+ builder.finish().into_data()
}
#[test]
@@ -712,7 +712,7 @@ mod tests {
))))
.len(6)
.add_buffer(Buffer::from(vec![0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
- .add_child_data(c_values.data().clone())
+ .add_child_data(c_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00001001])))
.build()
.unwrap();
@@ -734,7 +734,7 @@ mod tests {
))))
.len(6)
.add_buffer(Buffer::from(vec![0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
- .add_child_data(d_values.data().clone())
+ .add_child_data(d_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00001001])))
.build()
.unwrap();
@@ -774,7 +774,7 @@ mod tests {
builder.append_null().unwrap();
}
}
- builder.finish().data().clone()
+ builder.finish().into_data()
}
#[test]
@@ -941,7 +941,7 @@ mod tests {
builder.append(false).unwrap()
}
}
- builder.finish().data().clone()
+ builder.finish().into_data()
}
#[test]
@@ -1250,7 +1250,7 @@ mod tests {
builder.append_null().unwrap()
}
}
- builder.finish().data().clone()
+ builder.finish().into_data()
}
#[test]
diff --git a/arrow/src/array/null.rs b/arrow/src/array/null.rs
index 310c04a9c..467121f6c 100644
--- a/arrow/src/array/null.rs
+++ b/arrow/src/array/null.rs
@@ -67,6 +67,10 @@ impl Array for NullArray {
&self.data
}
+ fn into_data(self) -> ArrayData {
+ self.into()
+ }
+
/// Returns whether the element at `index` is null.
/// All elements of a `NullArray` are always null.
fn is_null(&self, _index: usize) -> bool {
@@ -106,6 +110,12 @@ impl From<ArrayData> for NullArray {
}
}
+impl From<NullArray> for ArrayData {
+ fn from(array: NullArray) -> Self {
+ array.data
+ }
+}
+
impl fmt::Debug for NullArray {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "NullArray({})", self.len())
diff --git a/arrow/src/array/transform/mod.rs b/arrow/src/array/transform/mod.rs
index 68ae7f6d4..a103c35e5 100644
--- a/arrow/src/array/transform/mod.rs
+++ b/arrow/src/array/transform/mod.rs
@@ -977,7 +977,7 @@ mod tests {
builder.append_null().unwrap()
}
}
- builder.finish().data().clone()
+ builder.finish().into_data()
}
#[test]
@@ -1235,7 +1235,7 @@ mod tests {
None,
0,
vec![list_value_offsets],
- vec![expected_int_array.data().clone()],
+ vec![expected_int_array.into_data()],
)
.unwrap();
assert_eq!(finished, expected_list_data);
@@ -1316,7 +1316,7 @@ mod tests {
Some(Buffer::from(&[0b11011011, 0b1110])),
0,
vec![list_value_offsets],
- vec![expected_int_array.data().clone()],
+ vec![expected_int_array.into_data()],
)
.unwrap();
assert_eq!(result, expected_list_data);
@@ -1500,7 +1500,7 @@ mod tests {
Some(Buffer::from(&[0b11011011, 0b1110])),
0,
vec![map_offsets],
- vec![expected_entry_array.data().clone()],
+ vec![expected_entry_array.into_data()],
)
.unwrap();
assert_eq!(result, expected_list_data);
@@ -1571,7 +1571,7 @@ mod tests {
None,
0,
vec![list_value_offsets],
- vec![expected_string_array.data().clone()],
+ vec![expected_string_array.into_data()],
)
.unwrap();
assert_eq!(result, expected_list_data);
diff --git a/arrow/src/compute/kernels/cast.rs b/arrow/src/compute/kernels/cast.rs
index 4ffb1ea5d..73288c040 100644
--- a/arrow/src/compute/kernels/cast.rs
+++ b/arrow/src/compute/kernels/cast.rs
@@ -1755,7 +1755,7 @@ fn dictionary_cast<K: ArrowDictionaryKeyType>(
.map(|bitmap| bitmap.bits),
cast_keys.data().offset(),
cast_keys.data().buffers().to_vec(),
- vec![cast_values.data().clone()],
+ vec![cast_values.into_data()],
)
};
@@ -1973,7 +1973,7 @@ fn cast_primitive_to_list<OffsetSize: OffsetSizeTrait + NumCast>(
.map(|bitmap| bitmap.bits),
0,
vec![offsets.into()],
- vec![cast_array.data().clone()],
+ vec![cast_array.into_data()],
)
};
let list_array =
@@ -2001,7 +2001,7 @@ fn cast_list_inner<OffsetSize: OffsetSizeTrait>(
array.offset(),
// reuse offset buffer
data.buffers().to_vec(),
- vec![cast_array.data().clone()],
+ vec![cast_array.into_data()],
)
};
let list = GenericListArray::<OffsetSize>::from(array_data);
diff --git a/arrow/src/compute/kernels/take.rs b/arrow/src/compute/kernels/take.rs
index 624e9ddcd..57471f459 100644
--- a/arrow/src/compute/kernels/take.rs
+++ b/arrow/src/compute/kernels/take.rs
@@ -832,7 +832,7 @@ where
.len(indices.len())
.null_bit_buffer(Some(null_buf.into()))
.offset(0)
- .add_child_data(taken.data().clone())
+ .add_child_data(taken.into_data())
.add_buffer(value_offsets);
let list_data = unsafe { list_data.build_unchecked() };
@@ -875,7 +875,7 @@ where
.len(indices.len())
.null_bit_buffer(Some(null_buf.into()))
.offset(0)
- .add_child_data(taken.data().clone());
+ .add_child_data(taken.into_data());
let list_data = unsafe { list_data.build_unchecked() };
diff --git a/arrow/src/compute/util.rs b/arrow/src/compute/util.rs
index c8e68fbeb..29a90b65c 100644
--- a/arrow/src/compute/util.rs
+++ b/arrow/src/compute/util.rs
@@ -370,7 +370,7 @@ pub(super) mod tests {
offset.push(values.len() as i64);
}
- let value_data = PrimitiveArray::<T>::from(values).data().clone();
+ let value_data = PrimitiveArray::<T>::from(values).into_data();
let (list_data_type, value_offsets) = if TypeId::of::<S>() == TypeId::of::<i32>()
{
(
@@ -439,7 +439,7 @@ pub(super) mod tests {
length,
);
- let child_data = PrimitiveArray::<T>::from(values).data().clone();
+ let child_data = PrimitiveArray::<T>::from(values).into_data();
let list_data = ArrayData::builder(list_data_type)
.len(list_len)
diff --git a/arrow/src/ffi.rs b/arrow/src/ffi.rs
index ad2062b4c..b55ca7eb5 100644
--- a/arrow/src/ffi.rs
+++ b/arrow/src/ffi.rs
@@ -53,7 +53,7 @@
//! assert_eq!(array, Int32Array::from(vec![Some(2), None, Some(6)]));
//!
//! // Simulate if raw pointers are provided by consumer
-//! let array = make_array(Int32Array::from(vec![Some(1), None, Some(3)]).data().clone());
+//! let array = make_array(Int32Array::from(vec![Some(1), None, Some(3)]).into_data());
//!
//! let out_array = Box::new(FFI_ArrowArray::empty());
//! let out_schema = Box::new(FFI_ArrowSchema::empty());
@@ -910,7 +910,7 @@ mod tests {
let array = Int32Array::from(vec![1, 2, 3]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -961,7 +961,7 @@ mod tests {
GenericStringArray::<Offset>::from(vec![Some("a"), None, Some("aaa")]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1033,7 +1033,7 @@ mod tests {
let array = GenericListArray::<Offset>::from(list_data.clone());
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1073,7 +1073,7 @@ mod tests {
let array = GenericBinaryArray::<Offset>::from(array);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1118,7 +1118,7 @@ mod tests {
let array = BooleanArray::from(vec![None, Some(true), Some(false)]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1144,7 +1144,7 @@ mod tests {
let array = Time32MillisecondArray::from(vec![None, Some(1), Some(2)]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1180,7 +1180,7 @@ mod tests {
let array = TimestampMillisecondArray::from(vec![None, Some(1), Some(2)]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1223,7 +1223,7 @@ mod tests {
let array = FixedSizeBinaryArray::try_from_sparse_iter(values.into_iter())?;
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1328,7 +1328,7 @@ mod tests {
let dict_array: DictionaryArray<Int8Type> = values.into_iter().collect();
// export it
- let array = ArrowArray::try_from(dict_array.data().clone())?;
+ let array = ArrowArray::try_from(dict_array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
@@ -1352,7 +1352,7 @@ mod tests {
#[test]
fn test_export_array_into_raw() -> Result<()> {
- let array = make_array(Int32Array::from(vec![1, 2, 3]).data().clone());
+ let array = make_array(Int32Array::from(vec![1, 2, 3]).into_data());
// Assume two raw pointers provided by the consumer
let out_array = Box::new(FFI_ArrowArray::empty());
@@ -1389,7 +1389,7 @@ mod tests {
let array = DurationSecondArray::from(vec![None, Some(1), Some(2)]);
// export it
- let array = ArrowArray::try_from(array.data().clone())?;
+ let array = ArrowArray::try_from(array.into_data())?;
// (simulate consumer) import it
let data = ArrayData::try_from(array)?;
diff --git a/arrow/src/ipc/reader.rs b/arrow/src/ipc/reader.rs
index da5098a5e..e8abd3a63 100644
--- a/arrow/src/ipc/reader.rs
+++ b/arrow/src/ipc/reader.rs
@@ -453,7 +453,7 @@ fn create_primitive_array(
let values = Arc::new(Int64Array::from(data)) as ArrayRef;
// this cast is infallible, the unwrap is safe
let casted = cast(&values, data_type).unwrap();
- casted.data().clone()
+ casted.into_data()
} else {
let builder = ArrayData::builder(data_type.clone())
.len(length)
@@ -477,7 +477,7 @@ fn create_primitive_array(
let values = Arc::new(Float64Array::from(data)) as ArrayRef;
// this cast is infallible, the unwrap is safe
let casted = cast(&values, data_type).unwrap();
- casted.data().clone()
+ casted.into_data()
} else {
let builder = ArrayData::builder(data_type.clone())
.len(length)
@@ -536,7 +536,7 @@ fn create_list_array(
.len(field_node.length() as usize)
.buffers(buffers[1..2].to_vec())
.offset(0)
- .child_data(vec![child_array.data().clone()])
+ .child_data(vec![child_array.into_data()])
.null_bit_buffer((null_count > 0).then(|| buffers[0].clone()));
make_array(unsafe { builder.build_unchecked() })
@@ -546,7 +546,7 @@ fn create_list_array(
.len(field_node.length() as usize)
.buffers(buffers[1..1].to_vec())
.offset(0)
- .child_data(vec![child_array.data().clone()])
+ .child_data(vec![child_array.into_data()])
.null_bit_buffer((null_count > 0).then(|| buffers[0].clone()));
make_array(unsafe { builder.build_unchecked() })
@@ -556,7 +556,7 @@ fn create_list_array(
.len(field_node.length() as usize)
.buffers(buffers[1..2].to_vec())
.offset(0)
- .child_data(vec![child_array.data().clone()])
+ .child_data(vec![child_array.into_data()])
.null_bit_buffer((null_count > 0).then(|| buffers[0].clone()));
make_array(unsafe { builder.build_unchecked() })
@@ -579,7 +579,7 @@ fn create_dictionary_array(
.len(field_node.length() as usize)
.buffers(buffers[1..2].to_vec())
.offset(0)
- .child_data(vec![value_array.data().clone()])
+ .child_data(vec![value_array.into_data()])
.null_bit_buffer((null_count > 0).then(|| buffers[0].clone()));
make_array(unsafe { builder.build_unchecked() })
@@ -1509,8 +1509,8 @@ mod tests {
Some(vec![Some(-30)]),
]));
let array9 = ArrayDataBuilder::new(schema.field(9).data_type().clone())
- .add_child_data(array9_id.data().clone())
- .add_child_data(array9_list.data().clone())
+ .add_child_data(array9_id.into_data())
+ .add_child_data(array9_list.into_data())
.len(3)
.build()
.unwrap();
@@ -1800,8 +1800,8 @@ mod tests {
false,
);
let entry_struct = StructArray::from(vec![
- (keys_field, make_array(key_dict_array.data().clone())),
- (values_field, make_array(value_dict_array.data().clone())),
+ (keys_field, make_array(key_dict_array.into_data())),
+ (values_field, make_array(value_dict_array.into_data())),
]);
let map_data_type = DataType::Map(
Box::new(Field::new(
@@ -1816,7 +1816,7 @@ mod tests {
let map_data = ArrayData::builder(map_data_type)
.len(3)
.add_buffer(entry_offsets)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()
.unwrap();
let map_array = MapArray::from(map_data);
diff --git a/arrow/src/json/reader.rs b/arrow/src/json/reader.rs
index 3ac2566e7..260d185da 100644
--- a/arrow/src/json/reader.rs
+++ b/arrow/src/json/reader.rs
@@ -1031,7 +1031,7 @@ impl Decoder {
});
let valid_len = cur_offset.to_usize().unwrap();
let array_data = match list_field.data_type() {
- DataType::Null => NullArray::new(valid_len).data().clone(),
+ DataType::Null => NullArray::new(valid_len).into_data(),
DataType::Boolean => {
let num_bytes = bit_util::ceil(valid_len, 8);
let mut bool_values = MutableBuffer::from_len_zeroed(num_bytes);
@@ -1103,12 +1103,12 @@ impl Decoder {
DataType::List(field) => {
let child = self
.build_nested_list_array::<i32>(&flatten_json_values(rows), field)?;
- child.data().clone()
+ child.into_data()
}
DataType::LargeList(field) => {
let child = self
.build_nested_list_array::<i64>(&flatten_json_values(rows), field)?;
- child.data().clone()
+ child.into_data()
}
DataType::Struct(fields) => {
// extract list values, with non-lists converted to Value::Null
@@ -1144,9 +1144,7 @@ impl Decoder {
ArrayDataBuilder::new(data_type)
.len(rows.len())
.null_bit_buffer(Some(buf))
- .child_data(
- arrays.into_iter().map(|a| a.data().clone()).collect(),
- )
+ .child_data(arrays.into_iter().map(|a| a.into_data()).collect())
.build_unchecked()
}
}
@@ -1353,7 +1351,7 @@ impl Decoder {
.len(len)
.null_bit_buffer(Some(null_buffer.into()))
.child_data(
- arrays.into_iter().map(|a| a.data().clone()).collect(),
+ arrays.into_iter().map(|a| a.into_data()).collect(),
);
let data = unsafe { data.build_unchecked() };
Ok(make_array(data))
@@ -1463,7 +1461,7 @@ impl Decoder {
vec![],
struct_children
.into_iter()
- .map(|array| array.data().clone())
+ .map(|array| array.into_data())
.collect(),
)],
)))
@@ -1525,7 +1523,7 @@ impl Decoder {
})
.collect::<Vec<Option<T::Native>>>();
let array = values.iter().collect::<PrimitiveArray<T>>();
- array.data().clone()
+ array.into_data()
}
}
@@ -2233,14 +2231,14 @@ mod tests {
let d = StringArray::from(vec![Some("text"), None, Some("text"), None]);
let c = ArrayDataBuilder::new(c_field.data_type().clone())
.len(4)
- .add_child_data(d.data().clone())
+ .add_child_data(d.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00000101])))
.build()
.unwrap();
let b = BooleanArray::from(vec![Some(true), Some(false), Some(true), None]);
let a = ArrayDataBuilder::new(a_field.data_type().clone())
.len(4)
- .add_child_data(b.data().clone())
+ .add_child_data(b.into_data())
.add_child_data(c)
.null_bit_buffer(Some(Buffer::from(vec![0b00000111])))
.build()
diff --git a/arrow/src/json/writer.rs b/arrow/src/json/writer.rs
index 72a4d6252..0755a5758 100644
--- a/arrow/src/json/writer.rs
+++ b/arrow/src/json/writer.rs
@@ -1118,7 +1118,7 @@ mod tests {
let a_list_data = ArrayData::builder(field_c1.data_type().clone())
.len(5)
.add_buffer(a_value_offsets)
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00011111])))
.build()
.unwrap();
@@ -1171,7 +1171,7 @@ mod tests {
.len(3)
.add_buffer(a_value_offsets)
.null_bit_buffer(Some(Buffer::from(vec![0b00000111])))
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.build()
.unwrap();
@@ -1253,7 +1253,7 @@ mod tests {
let c1_list_data = ArrayData::builder(field_c1.data_type().clone())
.len(3)
.add_buffer(c1_value_offsets)
- .add_child_data(struct_values.data().clone())
+ .add_child_data(struct_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00000101])))
.build()
.unwrap();
@@ -1438,7 +1438,7 @@ mod tests {
.len(6)
.null_bit_buffer(Some(valid_buffer))
.add_buffer(entry_offsets)
- .add_child_data(entry_struct.data().clone())
+ .add_child_data(entry_struct.into_data())
.build()
.unwrap();
diff --git a/arrow/src/record_batch.rs b/arrow/src/record_batch.rs
index 3f3ec1b7c..47257b496 100644
--- a/arrow/src/record_batch.rs
+++ b/arrow/src/record_batch.rs
@@ -645,7 +645,7 @@ mod tests {
DataType::Int8,
false,
))))
- .add_child_data(a2_child.data().clone())
+ .add_child_data(a2_child.into_data())
.len(2)
.add_buffer(Buffer::from(vec![0i32, 3, 4].to_byte_slice()))
.build()
@@ -655,8 +655,8 @@ mod tests {
Field::new("aa1", DataType::Int32, false),
Field::new("a2", a2.data_type().clone(), false),
]))
- .add_child_data(a1.data().clone())
- .add_child_data(a2.data().clone())
+ .add_child_data(a1.into_data())
+ .add_child_data(a2.into_data())
.len(2)
.build()
.unwrap();
diff --git a/arrow/src/util/integration_util.rs b/arrow/src/util/integration_util.rs
index a174da6ea..ee32f0c39 100644
--- a/arrow/src/util/integration_util.rs
+++ b/arrow/src/util/integration_util.rs
@@ -944,7 +944,7 @@ mod tests {
let list_data = ArrayData::builder(list_data_type)
.len(3)
.add_buffer(value_offsets)
- .add_child_data(value_data.data().clone())
+ .add_child_data(value_data.into_data())
.build()
.unwrap();
let lists = ListArray::from(list_data);
diff --git a/integration-testing/src/lib.rs b/integration-testing/src/lib.rs
index c7796ece4..e4cc872ff 100644
--- a/integration-testing/src/lib.rs
+++ b/integration-testing/src/lib.rs
@@ -495,7 +495,7 @@ fn array_from_json(
.len(json_col.count)
.offset(0)
.add_buffer(Buffer::from(&offsets.to_byte_slice()))
- .add_child_data(child_array.data().clone())
+ .add_child_data(child_array.into_data())
.null_bit_buffer(Some(null_buf))
.build()
.unwrap();
@@ -523,7 +523,7 @@ fn array_from_json(
.len(json_col.count)
.offset(0)
.add_buffer(Buffer::from(&offsets.to_byte_slice()))
- .add_child_data(child_array.data().clone())
+ .add_child_data(child_array.into_data())
.null_bit_buffer(Some(null_buf))
.build()
.unwrap();
@@ -539,7 +539,7 @@ fn array_from_json(
let null_buf = create_null_buf(&json_col);
let list_data = ArrayData::builder(field.data_type().clone())
.len(json_col.count)
- .add_child_data(child_array.data().clone())
+ .add_child_data(child_array.into_data())
.null_bit_buffer(Some(null_buf))
.build()
.unwrap();
@@ -554,7 +554,7 @@ fn array_from_json(
for (field, col) in fields.iter().zip(json_col.children.unwrap()) {
let array = array_from_json(field, col, dictionaries)?;
- array_data = array_data.add_child_data(array.data().clone());
+ array_data = array_data.add_child_data(array.into_data());
}
let array = StructArray::from(array_data.build().unwrap());
@@ -628,7 +628,7 @@ fn array_from_json(
let array_data = ArrayData::builder(field.data_type().clone())
.len(json_col.count)
.add_buffer(Buffer::from(&offsets.to_byte_slice()))
- .add_child_data(child_array.data().clone())
+ .add_child_data(child_array.into_data())
.null_bit_buffer(Some(null_buf))
.build()
.unwrap();
@@ -718,7 +718,7 @@ fn dictionary_array_from_json(
.len(keys.len())
.add_buffer(keys.data().buffers()[0].clone())
.null_bit_buffer(Some(null_buf))
- .add_child_data(values.data().clone())
+ .add_child_data(values.into_data())
.build()
.unwrap();
diff --git a/parquet/src/arrow/array_reader/list_array.rs b/parquet/src/arrow/array_reader/list_array.rs
index ab51cd87d..e1cd71b9e 100644
--- a/parquet/src/arrow/array_reader/list_array.rs
+++ b/parquet/src/arrow/array_reader/list_array.rs
@@ -326,7 +326,7 @@ mod tests {
let l3 = ArrayDataBuilder::new(l3_type.clone())
.len(10)
.add_buffer(offsets)
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.null_bit_buffer(Some(Buffer::from([0b11111101, 0b00000010])))
.build()
.unwrap();
diff --git a/parquet/src/arrow/array_reader/map_array.rs b/parquet/src/arrow/array_reader/map_array.rs
index efeafe201..92487ebbc 100644
--- a/parquet/src/arrow/array_reader/map_array.rs
+++ b/parquet/src/arrow/array_reader/map_array.rs
@@ -18,7 +18,7 @@
use crate::arrow::array_reader::ArrayReader;
use crate::errors::ParquetError::ArrowError;
use crate::errors::{ParquetError, Result};
-use arrow::array::{ArrayDataBuilder, ArrayRef, MapArray};
+use arrow::array::{Array, ArrayDataBuilder, ArrayRef, MapArray};
use arrow::buffer::{Buffer, MutableBuffer};
use arrow::datatypes::DataType as ArrowType;
use arrow::datatypes::ToByteSlice;
@@ -97,8 +97,8 @@ impl ArrayReader for MapArrayReader {
let entry_data = ArrayDataBuilder::new(entry_data_type)
.len(key_length)
- .add_child_data(key_array.data().clone())
- .add_child_data(value_array.data().clone());
+ .add_child_data(key_array.into_data())
+ .add_child_data(value_array.into_data());
let entry_data = unsafe { entry_data.build_unchecked() };
let entry_len = rep_levels.iter().filter(|level| **level == 0).count();
diff --git a/parquet/src/arrow/arrow_writer/levels.rs b/parquet/src/arrow/arrow_writer/levels.rs
index 073754262..51e494d41 100644
--- a/parquet/src/arrow/arrow_writer/levels.rs
+++ b/parquet/src/arrow/arrow_writer/levels.rs
@@ -506,7 +506,7 @@ mod tests {
let inner_list = ArrayDataBuilder::new(inner_type)
.len(4)
.add_buffer(offsets)
- .add_child_data(primitives.data().clone())
+ .add_child_data(primitives.into_data())
.build()
.unwrap();
@@ -590,7 +590,7 @@ mod tests {
let list = ArrayDataBuilder::new(list_type.clone())
.len(5)
.add_buffer(offsets)
- .add_child_data(leaf_array.data().clone())
+ .add_child_data(leaf_array.into_data())
.build()
.unwrap();
let list = make_array(list);
@@ -621,7 +621,7 @@ mod tests {
let list = ArrayDataBuilder::new(list_type.clone())
.len(5)
.add_buffer(offsets)
- .add_child_data(leaf_array.data().clone())
+ .add_child_data(leaf_array.into_data())
.null_bit_buffer(Some(Buffer::from([0b00011101])))
.build()
.unwrap();
@@ -662,7 +662,7 @@ mod tests {
let list_type = DataType::List(Box::new(leaf_field));
let list = ArrayData::builder(list_type.clone())
.len(5)
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.add_buffer(Buffer::from_iter([0_i32, 2, 2, 4, 8, 11]))
.build()
.unwrap();
@@ -704,7 +704,7 @@ mod tests {
let offsets = Buffer::from_iter([0_i32, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22]);
let l1 = ArrayData::builder(l1_type.clone())
.len(11)
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.add_buffer(offsets)
.build()
.unwrap();
@@ -755,7 +755,7 @@ mod tests {
let list = ArrayData::builder(list_type.clone())
.len(4)
.add_buffer(Buffer::from_iter(0_i32..5))
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.build()
.unwrap();
let list = make_array(list);
@@ -782,7 +782,7 @@ mod tests {
.len(4)
.add_buffer(Buffer::from_iter([0_i32, 0, 3, 5, 7]))
.null_bit_buffer(Some(Buffer::from([0b00001110])))
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.build()
.unwrap();
let list = make_array(list);
@@ -817,7 +817,7 @@ mod tests {
let list_1 = ArrayData::builder(list_1_type.clone())
.len(7)
.add_buffer(Buffer::from_iter([0_i32, 1, 3, 3, 6, 10, 10, 15]))
- .add_child_data(leaf.data().clone())
+ .add_child_data(leaf.into_data())
.build()
.unwrap();
@@ -904,7 +904,7 @@ mod tests {
.len(5)
.add_buffer(a_value_offsets)
.null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.build()
.unwrap();
@@ -977,7 +977,7 @@ mod tests {
let g_list_data = ArrayData::builder(struct_field_g.data_type().clone())
.len(5)
.add_buffer(g_value_offsets)
- .add_child_data(g_value.data().clone())
+ .add_child_data(g_value.into_data())
.build()
.unwrap();
let g = ListArray::from(g_list_data);
@@ -1352,7 +1352,7 @@ mod tests {
.len(6)
.null_bit_buffer(Some(nulls))
.add_buffer(offsets)
- .add_child_data(struct_a.data().clone())
+ .add_child_data(struct_a.into_data())
.build()
.unwrap();
diff --git a/parquet/src/arrow/arrow_writer/mod.rs b/parquet/src/arrow/arrow_writer/mod.rs
index a18098ff1..fa1428118 100644
--- a/parquet/src/arrow/arrow_writer/mod.rs
+++ b/parquet/src/arrow/arrow_writer/mod.rs
@@ -838,7 +838,7 @@ mod tests {
))))
.len(5)
.add_buffer(a_value_offsets)
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
.build()
.unwrap();
@@ -879,7 +879,7 @@ mod tests {
))))
.len(5)
.add_buffer(a_value_offsets)
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.build()
.unwrap();
let a = ListArray::from(a_list_data);
@@ -1110,14 +1110,14 @@ mod tests {
let b_data = ArrayDataBuilder::new(field_b.data_type().clone())
.len(6)
.null_bit_buffer(Some(Buffer::from(vec![0b00100111])))
- .add_child_data(c.data().clone())
+ .add_child_data(c.into_data())
.build()
.unwrap();
let b = StructArray::from(b_data);
let a_data = ArrayDataBuilder::new(field_a.data_type().clone())
.len(6)
.null_bit_buffer(Some(Buffer::from(vec![0b00101111])))
- .add_child_data(b.data().clone())
+ .add_child_data(b.into_data())
.build()
.unwrap();
let a = StructArray::from(a_data);
@@ -1143,13 +1143,13 @@ mod tests {
let c = Int32Array::from(vec![1, 2, 3, 4, 5, 6]);
let b_data = ArrayDataBuilder::new(field_b.data_type().clone())
.len(6)
- .add_child_data(c.data().clone())
+ .add_child_data(c.into_data())
.build()
.unwrap();
let b = StructArray::from(b_data);
let a_data = ArrayDataBuilder::new(field_a.data_type().clone())
.len(6)
- .add_child_data(b.data().clone())
+ .add_child_data(b.into_data())
.build()
.unwrap();
let a = StructArray::from(a_data);
@@ -1176,14 +1176,14 @@ mod tests {
let b_data = ArrayDataBuilder::new(field_b.data_type().clone())
.len(6)
.null_bit_buffer(Some(Buffer::from(vec![0b00100111])))
- .add_child_data(c.data().clone())
+ .add_child_data(c.into_data())
.build()
.unwrap();
let b = StructArray::from(b_data);
// a intentionally has no null buffer, to test that this is handled correctly
let a_data = ArrayDataBuilder::new(field_a.data_type().clone())
.len(6)
- .add_child_data(b.data().clone())
+ .add_child_data(b.into_data())
.build()
.unwrap();
let a = StructArray::from(a_data);
@@ -1559,7 +1559,7 @@ mod tests {
.len(3)
.add_buffer(a_value_offsets)
.null_bit_buffer(Some(Buffer::from(vec![0b00000101])))
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.build()
.unwrap();
@@ -1590,7 +1590,7 @@ mod tests {
.len(5)
.add_buffer(a_value_offsets)
.null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.build()
.unwrap();
@@ -1614,7 +1614,7 @@ mod tests {
))))
.len(5)
.add_buffer(a_value_offsets)
- .add_child_data(a_values.data().clone())
+ .add_child_data(a_values.into_data())
.null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
.build()
.unwrap();
@@ -2014,7 +2014,7 @@ mod tests {
.null_bit_buffer(Some(Buffer::from_iter(vec![
true, false, true, false, true,
])))
- .child_data(vec![struct_a_array.data().clone()])
+ .child_data(vec![struct_a_array.into_data()])
.build()
.unwrap();
@@ -2038,7 +2038,7 @@ mod tests {
let list_data = ArrayDataBuilder::new(list_a.data_type().clone())
.len(2)
.add_buffer(Buffer::from_iter(vec![0_i32, 4_i32, 5_i32]))
- .child_data(vec![struct_a_array.data().clone()])
+ .child_data(vec![struct_a_array.into_data()])
.build()
.unwrap();
diff --git a/parquet/src/arrow/buffer/dictionary_buffer.rs b/parquet/src/arrow/buffer/dictionary_buffer.rs
index ffa3a4843..b64b2946b 100644
--- a/parquet/src/arrow/buffer/dictionary_buffer.rs
+++ b/parquet/src/arrow/buffer/dictionary_buffer.rs
@@ -21,7 +21,7 @@ use crate::arrow::record_reader::buffer::{
};
use crate::column::reader::decoder::ValuesBufferSlice;
use crate::errors::{ParquetError, Result};
-use arrow::array::{make_array, ArrayDataBuilder, ArrayRef, OffsetSizeTrait};
+use arrow::array::{make_array, Array, ArrayDataBuilder, ArrayRef, OffsetSizeTrait};
use arrow::buffer::Buffer;
use arrow::datatypes::{ArrowNativeType, DataType as ArrowType};
use std::sync::Arc;
@@ -161,7 +161,7 @@ impl<K: ScalarValue + ArrowNativeType + Ord, V: ScalarValue + OffsetSizeTrait>
let builder = ArrayDataBuilder::new(data_type.clone())
.len(keys.len())
.add_buffer(keys.into())
- .add_child_data(values.data().clone())
+ .add_child_data(values.into_data())
.null_bit_buffer(null_buffer);
let data = match cfg!(debug_assertions) {