You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@arrow.apache.org by al...@apache.org on 2023/06/11 14:42:33 UTC
[arrow-datafusion] branch main updated: Add datafusion-cli tests to the CI Job (#6600)
This is an automated email from the ASF dual-hosted git repository.
alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git
The following commit(s) were added to refs/heads/main by this push:
new a2d3c3c32c Add datafusion-cli tests to the CI Job (#6600)
a2d3c3c32c is described below
commit a2d3c3c32c4ef8ef788e4f3d6e2d211f739615c3
Author: r.4ntix <r....@gmail.com>
AuthorDate: Sun Jun 11 22:42:28 2023 +0800
Add datafusion-cli tests to the CI Job (#6600)
* Add datafusion-cli tests to the CI Job
* Fix clippy error of datafusion-cli
* update tests of datafusion-cli
* fix test errors of datafusion-cli
* Splits out "cargo test datafusion-cli (amd64)" into separate CI job, fix "no space left on device" error
---
.github/workflows/rust.yml | 38 +++++++++++++++++++++++++++++++++++-
ci/scripts/rust_clippy.sh | 2 ++
ci/scripts/rust_docs.sh | 2 ++
ci/scripts/rust_fmt.sh | 2 ++
datafusion-cli/src/exec.rs | 26 ++++++++++++++----------
datafusion-cli/src/object_storage.rs | 3 +--
datafusion-cli/src/print_format.rs | 13 ++++++------
7 files changed, 66 insertions(+), 20 deletions(-)
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 7c6f468482..efc97ebaba 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -100,6 +100,33 @@ jobs:
- name: Verify Working Directory Clean
run: git diff --exit-code
+ linux-test-datafusion-cli:
+ name: cargo test datafusion-cli (amd64)
+ needs: [ linux-build-lib ]
+ runs-on: ubuntu-latest
+ container:
+ image: amd64/rust
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ submodules: true
+ - name: Cache Cargo
+ uses: actions/cache@v3
+ with:
+ path: /github/home/.cargo
+ # this key equals the ones on `linux-build-lib` for re-use
+ key: cargo-cache-
+ - name: Setup Rust toolchain
+ uses: ./.github/actions/setup-builder
+ with:
+ rust-version: stable
+ - name: Run tests (excluding doctests)
+ run: |
+ cd datafusion-cli
+ cargo test --lib --tests --bins --all-features
+ - name: Verify Working Directory Clean
+ run: git diff --exit-code
+
linux-test-example:
name: cargo examples (amd64)
needs: [ linux-build-lib ]
@@ -152,7 +179,10 @@ jobs:
rust-version: stable
# Note: this does not include dictionary_expressions to reduce codegen
- name: Run doctests
- run: cargo test --doc --features avro,json
+ run: |
+ cargo test --doc --features avro,json
+ cd datafusion-cli
+ cargo test --doc --all-features
- name: Verify Working Directory Clean
run: git diff --exit-code
@@ -173,6 +203,8 @@ jobs:
run: |
export RUSTDOCFLAGS="-D warnings -A rustdoc::private-intra-doc-links"
cargo doc --document-private-items --no-deps --workspace
+ cd datafusion-cli
+ cargo doc --document-private-items --no-deps
# verify that the benchmark queries return the correct results
verify-benchmark-results:
@@ -271,6 +303,8 @@ jobs:
run: |
export PATH=$PATH:$HOME/d/protoc/bin
cargo test --lib --tests --bins --features avro,json,dictionary_expressions
+ cd datafusion-cli
+ cargo test --lib --tests --bins --all-features
env:
# do not produce debug symbols to keep memory usage down
RUSTFLAGS: "-C debuginfo=0"
@@ -304,6 +338,8 @@ jobs:
shell: bash
run: |
cargo test --lib --tests --bins --features avro,json,dictionary_expressions
+ cd datafusion-cli
+ cargo test --lib --tests --bins --all-features
env:
# do not produce debug symbols to keep memory usage down
RUSTFLAGS: "-C debuginfo=0"
diff --git a/ci/scripts/rust_clippy.sh b/ci/scripts/rust_clippy.sh
index dfd2916981..f5c8b61e1c 100755
--- a/ci/scripts/rust_clippy.sh
+++ b/ci/scripts/rust_clippy.sh
@@ -19,3 +19,5 @@
set -ex
cargo clippy --all-targets --workspace --features avro,pyarrow -- -D warnings
+cd datafusion-cli
+cargo clippy --all-targets --all-features -- -D warnings
diff --git a/ci/scripts/rust_docs.sh b/ci/scripts/rust_docs.sh
index 033d6e890f..cf83b80b51 100755
--- a/ci/scripts/rust_docs.sh
+++ b/ci/scripts/rust_docs.sh
@@ -20,3 +20,5 @@
set -ex
export RUSTDOCFLAGS="-D warnings -A rustdoc::private-intra-doc-links"
cargo doc --document-private-items --no-deps --workspace
+cd datafusion-cli
+cargo doc --document-private-items --no-deps
diff --git a/ci/scripts/rust_fmt.sh b/ci/scripts/rust_fmt.sh
index 9d8325877a..cb9bb5e877 100755
--- a/ci/scripts/rust_fmt.sh
+++ b/ci/scripts/rust_fmt.sh
@@ -19,3 +19,5 @@
set -ex
cargo fmt --all -- --check
+cd datafusion-cli
+cargo fmt --all -- --check
diff --git a/datafusion-cli/src/exec.rs b/datafusion-cli/src/exec.rs
index 0debe240db..1ccbfef982 100644
--- a/datafusion-cli/src/exec.rs
+++ b/datafusion-cli/src/exec.rs
@@ -246,12 +246,13 @@ mod tests {
let ctx = SessionContext::new();
let plan = ctx.state().create_logical_plan(sql).await?;
- match &plan {
- LogicalPlan::Ddl(DdlStatement::CreateExternalTable(cmd)) => {
- create_external_table(&ctx, cmd).await?;
- }
- _ => unreachable!(),
- };
+ if let LogicalPlan::Ddl(DdlStatement::CreateExternalTable(cmd)) = &plan {
+ create_external_table(&ctx, cmd).await?;
+ } else {
+ return Err(DataFusionError::Plan(
+ "LogicalPlan is not a CreateExternalTable".to_string(),
+ ));
+ }
ctx.runtime_env()
.object_store(ListingTableUrl::parse(location)?)?;
@@ -312,7 +313,7 @@ mod tests {
let err = create_external_table_test(location, &sql)
.await
.unwrap_err();
- assert!(err.to_string().contains("No such file or directory"));
+ assert!(err.to_string().contains("os error 2"));
// for service_account_key
let sql = format!("CREATE EXTERNAL TABLE test STORED AS PARQUET OPTIONS('service_account_key' '{service_account_key}') LOCATION '{location}'");
@@ -327,14 +328,14 @@ mod tests {
let err = create_external_table_test(location, &sql)
.await
.unwrap_err();
- assert!(err.to_string().contains("No such file or directory"));
+ assert!(err.to_string().contains("os error 2"));
Ok(())
}
#[tokio::test]
async fn create_external_table_local_file() -> Result<()> {
- let location = "/path/to/file.parquet";
+ let location = "path/to/file.parquet";
// Ensure that local files are also registered
let sql =
@@ -342,7 +343,12 @@ mod tests {
let err = create_external_table_test(location, &sql)
.await
.unwrap_err();
- assert!(err.to_string().contains("No such file or directory"));
+
+ if let DataFusionError::IoError(e) = err {
+ assert_eq!(e.kind(), std::io::ErrorKind::NotFound);
+ } else {
+ return Err(err);
+ }
Ok(())
}
diff --git a/datafusion-cli/src/object_storage.rs b/datafusion-cli/src/object_storage.rs
index e4b7033c34..86958cc6b7 100644
--- a/datafusion-cli/src/object_storage.rs
+++ b/datafusion-cli/src/object_storage.rs
@@ -57,8 +57,7 @@ pub async fn get_s3_object_store_builder(
.ok_or_else(|| {
DataFusionError::ObjectStore(object_store::Error::Generic {
store: "S3",
- source: "Failed to get S3 credentials from environment".to_string()
- .into(),
+ source: "Failed to get S3 credentials from environment".into(),
})
})?
.clone();
diff --git a/datafusion-cli/src/print_format.rs b/datafusion-cli/src/print_format.rs
index de9e140f5c..a3953063fd 100644
--- a/datafusion-cli/src/print_format.rs
+++ b/datafusion-cli/src/print_format.rs
@@ -90,7 +90,6 @@ mod tests {
use super::*;
use arrow::array::Int32Array;
use arrow::datatypes::{DataType, Field, Schema};
- use datafusion::from_slice::FromSlice;
use std::sync::Arc;
#[test]
@@ -107,9 +106,9 @@ mod tests {
let batch = RecordBatch::try_new(
schema,
vec![
- Arc::new(Int32Array::from_slice([1, 2, 3])),
- Arc::new(Int32Array::from_slice([4, 5, 6])),
- Arc::new(Int32Array::from_slice([7, 8, 9])),
+ Arc::new(Int32Array::from(vec![1, 2, 3])),
+ Arc::new(Int32Array::from(vec![4, 5, 6])),
+ Arc::new(Int32Array::from(vec![7, 8, 9])),
],
)
.unwrap();
@@ -137,9 +136,9 @@ mod tests {
let batch = RecordBatch::try_new(
schema,
vec![
- Arc::new(Int32Array::from_slice([1, 2, 3])),
- Arc::new(Int32Array::from_slice([4, 5, 6])),
- Arc::new(Int32Array::from_slice([7, 8, 9])),
+ Arc::new(Int32Array::from(vec![1, 2, 3])),
+ Arc::new(Int32Array::from(vec![4, 5, 6])),
+ Arc::new(Int32Array::from(vec![7, 8, 9])),
],
)
.unwrap();