You are viewing a plain-text version of this content. The canonical link for it is available in the mailing-list archive.
Posted to commits@arrow.apache.org by we...@apache.org on 2018/07/24 03:24:10 UTC
[arrow] branch master updated: ARROW-2902: [Python] Clean up after
build artifacts created by root docker user in HDFS integration test
This is an automated email from the ASF dual-hosted git repository.
wesm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git
The following commit(s) were added to refs/heads/master by this push:
new b0ac721 ARROW-2902: [Python] Clean up after build artifacts created by root docker user in HDFS integration test
b0ac721 is described below
commit b0ac721d29389f7e91ef53388fab3fad592b65be
Author: Wes McKinney <we...@apache.org>
AuthorDate: Mon Jul 23 23:24:05 2018 -0400
ARROW-2902: [Python] Clean up after build artifacts created by root docker user in HDFS integration test
Author: Wes McKinney <we...@apache.org>
Closes #2313 from wesm/ARROW-2902 and squashes the following commits:
b22f0bd1 <Wes McKinney> Clean up after build artifacts created by root docker user
---
dev/hdfs_integration/hdfs_integration.sh | 40 +++++++++++++++++++++-----------
1 file changed, 27 insertions(+), 13 deletions(-)
diff --git a/dev/hdfs_integration/hdfs_integration.sh b/dev/hdfs_integration/hdfs_integration.sh
index c67f18d..a73867d 100755
--- a/dev/hdfs_integration/hdfs_integration.sh
+++ b/dev/hdfs_integration/hdfs_integration.sh
@@ -41,9 +41,26 @@ export CXXFLAGS="-D_GLIBCXX_USE_CXX11_ABI=0"
export PYARROW_CXXFLAGS=$CXXFLAGS
export PYARROW_CMAKE_GENERATOR=Ninja
+_PWD=`pwd`
+ARROW_CPP_BUILD_DIR=$_PWD/arrow/cpp/hdfs-integration-build
+PARQUET_CPP_BUILD_DIR=$_PWD/parquet-cpp/hdfs-integration-build
+
+# Run tests
+export LIBHDFS3_CONF=$_PWD/arrow/dev/hdfs_integration/libhdfs3-client-config.xml
+
+function cleanup {
+ rm -rf $ARROW_CPP_BUILD_DIR
+ rm -rf $PARQUET_CPP_BUILD_DIR
+ pushd $_PWD/arrow/python
+ git clean -fdx .
+ popd
+}
+
+trap cleanup EXIT
+
# Install arrow-cpp
-mkdir -p arrow/cpp/hdfs-integration-build
-pushd arrow/cpp/hdfs-integration-build
+mkdir -p $ARROW_CPP_BUILD_DIR
+pushd $ARROW_CPP_BUILD_DIR
cmake -GNinja \
-DCMAKE_BUILD_TYPE=$ARROW_BUILD_TYPE \
@@ -57,18 +74,21 @@ cmake -GNinja \
ninja
ninja install
+# Run C++ unit tests
+debug/io-hdfs-test
+
popd
# Install parquet-cpp
-mkdir -p parquet-cpp/hdfs-integration-build
-pushd parquet-cpp/hdfs-integration-build
+mkdir -p $PARQUET_CPP_BUILD_DIR
+pushd $PARQUET_CPP_BUILD_DIR
cmake -GNinja \
-DCMAKE_BUILD_TYPE=$ARROW_BUILD_TYPE \
-DCMAKE_INSTALL_PREFIX=$PARQUET_HOME \
-DPARQUET_BUILD_BENCHMARKS=OFF \
-DPARQUET_BUILD_EXECUTABLES=OFF \
- -DPARQUET_BUILD_TESTS=ON \
+ -DPARQUET_BUILD_TESTS=OFF \
-DCMAKE_CXX_FLAGS=$CXXFLAGS \
..
ninja
@@ -88,14 +108,8 @@ python setup.py build_ext \
--with-plasma \
--inplace
-popd
-
-# Run tests
-export LIBHDFS3_CONF=arrow/dev/hdfs_integration/libhdfs3-client-config.xml
-
# Python
-python -m pytest -vv -r sxX -s arrow/python/pyarrow \
+python -m pytest -vv -r sxX -s pyarrow \
--only-parquet --only-hdfs
-# C++
-arrow/cpp/hdfs-integration-build/debug/io-hdfs-test
+popd