You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@arrow.apache.org by ag...@apache.org on 2021/04/18 16:40:44 UTC
[arrow-datafusion] branch master updated: Remove non-Rust files
This is an automated email from the ASF dual-hosted git repository.
agrove pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git
The following commit(s) were added to refs/heads/master by this push:
new b184dab Remove non-Rust files
b184dab is described below
commit b184dab428ff9b6a72ccb6c97c79898f32efd09c
Author: Andy Grove <an...@gmail.com>
AuthorDate: Sun Apr 18 10:40:30 2021 -0600
Remove non-Rust files
---
appveyor.yml | 79 ---
cmake-format.py | 59 ---
docker-compose.yml | 1391 ---------------------------------------------------
run-cmake-format.py | 111 ----
4 files changed, 1640 deletions(-)
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index ace5bb9..0000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,79 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Operating system (build VM template)
-os: Visual Studio 2017
-
-only_commits:
- # Skip commits not related to Python or C++
- files:
- - appveyor.yml
- - ci/appveyor*
- - ci/conda*
- - cpp/
- - format/
- - python/
-
-cache:
- - C:\Users\Appveyor\clcache1
-
-matrix:
- fast_finish: true
-
-environment:
- global:
- # Make these variables visible in all jobs and build steps
- MSVC_DEFAULT_OPTIONS: ON
- APPVEYOR_SAVE_CACHE_ON_ERROR: true
- # Change the clcache dir to reset caches everywhere when a setting
- # is changed incompatibly (e.g. CLCACHE_COMPRESS).
- CLCACHE_DIR: C:\Users\Appveyor\clcache1
- CLCACHE_SERVER: 1
- CLCACHE_COMPRESS: 1
- CLCACHE_COMPRESSLEVEL: 6
- ARROW_BUILD_FLIGHT: "OFF"
- ARROW_BUILD_GANDIVA: "OFF"
- ARROW_LLVM_VERSION: "7.0.*"
- ARROW_S3: "OFF"
- PYTHON: "3.7"
- ARCH: "64"
-
- matrix:
- # NOTE: clcache seems to work best with Ninja and worst with msbuild
- # (as generated by cmake)
- - JOB: "Build"
- GENERATOR: Ninja
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- - JOB: "Toolchain"
- GENERATOR: Ninja
- ARROW_S3: "ON"
- ARROW_BUILD_FLIGHT: "ON"
- ARROW_BUILD_GANDIVA: "ON"
- - JOB: "Build_Debug"
- GENERATOR: Ninja
-
-before_build:
- - call ci\appveyor-cpp-setup.bat
-
-build_script:
- - call ci\appveyor-cpp-build.bat
-
-# Disable test discovery
-test: off
-
-after_build:
- - clcache -s
diff --git a/cmake-format.py b/cmake-format.py
deleted file mode 100644
index 0976642..0000000
--- a/cmake-format.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cmake-format configuration file
-# Use run-cmake-format.py to reformat all cmake files in the source tree
-
-# How wide to allow formatted cmake files
-line_width = 90
-
-# How many spaces to tab for indent
-tab_size = 2
-
-# If arglists are longer than this, break them always
-max_subargs_per_line = 4
-
-# If true, separate flow control names from their parentheses with a space
-separate_ctrl_name_with_space = False
-
-# If true, separate function names from parentheses with a space
-separate_fn_name_with_space = False
-
-# If a statement is wrapped to more than one line, then dangle the closing
-# parenthesis on its own line
-dangle_parens = False
-
-# What style line endings to use in the output.
-line_ending = 'unix'
-
-# Format command names consistently as 'lower' or 'upper' case
-command_case = 'lower'
-
-# Format keywords consistently as 'lower' or 'upper' case
-keyword_case = 'unchanged'
-
-# enable comment markup parsing and reflow
-enable_markup = False
-
-# If comment markup is enabled, don't reflow the first comment block in
-# each listfile. Use this to preserve formatting of your
-# copyright/license statements.
-first_comment_is_literal = False
-
-# If comment markup is enabled, don't reflow any comment block which matches this
-# (regex) pattern. Default is `None` (disabled).
-literal_comment_pattern = None
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index 4a3092e..0000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,1391 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Usage
-# -----
-#
-# The docker compose file is parametrized using environment variables, the
-# defaults are set in .env file.
-#
-# Example:
-# $ ARCH=arm64v8 docker-compose build ubuntu-cpp
-# $ ARCH=arm64v8 docker-compose run ubuntu-cpp
-#
-#
-# Coredumps
-# ---------
-#
-# In order to enable coredumps for the C++ tests run by CTest either with
-# command `make unittest` or `ctest --output-on-failure` the correct coredump
-# patterns must be set.
-# The kernel settings are coming from the host, so while it can be enabled from
-# a running container using the --privileged option the change will affect all other
-# containers, so prefer setting it explicitly, directly on the host.
-# WARNING: setting this will affect the host machine.
-#
-# Linux host:
-# $ sudo sysctl -w kernel.core_pattern=core.%e.%p
-#
-# macOS host running Docker for Mac (won't persist between restarts):
-# $ screen ~/Library/Containers/com.docker.docker/Data/vms/0/tty
-# # echo "core.%e.%p" > /proc/sys/kernel/core_pattern
-#
-# The setup attempts to generate coredumps by default, but the correct paths
-# above must be set. In order to disable the coredump generation set
-# ULIMIT_CORE environment variable to 0 before running docker-compose
-# (or by setting it in .env file):
-#
-# ULIMIT_CORE=0 docker-compose run --rm conda-cpp
-#
-# See more in cpp/build-support/run-test.sh::print_coredumps
-
-version: '3.5'
-
-x-ccache: &ccache
- CCACHE_COMPILERCHECK: content
- CCACHE_COMPRESS: 1
- CCACHE_COMPRESSLEVEL: 6
- CCACHE_MAXSIZE: 500M
- CCACHE_DIR: /ccache
-
-x-with-gpus:
- - ubuntu-cuda-cpp
- - ubuntu-cuda-python
-
-x-hierarchy:
- # This section is used by the archery tool to enable building nested images,
- # so it is enough to call:
- # archery run debian-ruby
- # instead of a sequence of docker-compose commands:
- # docker-compose build debian-cpp
- # docker-compose build debian-c-glib
- # docker-compose build debian-ruby
- # docker-compose run --rm debian-ruby
- #
- # Each node must be either a string scalar or a list containing the
- # descendant images if any. Archery checks that every node has a corresponding
- # service entry, so any new image/service must be listed here.
- - conda:
- - conda-cpp:
- - conda-cpp-hiveserver2
- - conda-cpp-valgrind
- - conda-python:
- - conda-python-pandas
- - conda-python-dask
- - conda-python-hdfs
- - conda-python-jpype
- - conda-python-turbodbc
- - conda-python-kartothek
- - conda-python-spark
- - conda-integration
- - debian-cpp:
- - debian-c-glib:
- - debian-ruby
- - debian-python
- - debian-go
- - debian-java:
- - debian-java-jni
- - debian-js
- - fedora-cpp:
- - fedora-python
- - ubuntu-cpp:
- - ubuntu-c-glib:
- - ubuntu-ruby
- - ubuntu-lint
- - ubuntu-python:
- - ubuntu-docs
- - ubuntu-python-sdist-test
- - ubuntu-r
- - ubuntu-cuda-cpp:
- - ubuntu-cuda-python
- - ubuntu-csharp
- - ubuntu-cpp-sanitizer
- - ubuntu-cpp-thread-sanitizer
- - ubuntu-r-sanitizer
- - python-sdist
- - r
- # helper services
- - impala
- - postgres
- - python-wheel-manylinux-2010
- - python-wheel-manylinux-2014
- - python-wheel-manylinux-test-imports
- - python-wheel-manylinux-test-unittests
- - python-wheel-windows-vs2017
- - python-wheel-windows-test
-
-volumes:
- conda-ccache:
- name: ${ARCH}-conda-ccache
- debian-ccache:
- name: ${ARCH}-debian-${DEBIAN}-ccache
- ubuntu-ccache:
- name: ${ARCH}-ubuntu-${UBUNTU}-ccache
- fedora-ccache:
- name: ${ARCH}-fedora-${FEDORA}-ccache
- debian-rust:
- name: ${ARCH}-debian-${DEBIAN}-rust
- maven-cache:
- name: maven-cache
- python-wheel-manylinux2010-ccache:
- name: python-wheel-manylinux2010-ccache
- python-wheel-manylinux2014-ccache:
- name: python-wheel-manylinux2014-ccache
- python-wheel-windows-clcache:
- name: python-wheel-windows-clcache
-
-services:
-
- ################################# C++ #######################################
- # Release build:
- # docker-compose run -e ARROW_BUILD_TYPE=release conda-cpp|debian-cpp|...
- # Shared only:
- # docker-compose run -e ARROW_BUILD_STATIC=OFF conda-cpp|debian-cpp|...
- # Static only:
- # docker-compose run \
- # -e ARROW_BUILD_SHARED=OFF \
- # -e ARROW_TEST_LINKAGE=static \
- # conda-cpp|debian-cpp|...
-
- conda:
- # Base image for conda builds.
- #
- # Usage:
- # docker-compose build conda
- # docker-compose run --rm conda
- # Parameters:
- # ARCH: amd64, arm32v7
- image: ${REPO}:${ARCH}-conda
- build:
- context: .
- dockerfile: ci/docker/conda.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda
- args:
- arch: ${ARCH}
- prefix: /opt/conda
- volumes:
- - .:/arrow:delegated
-
- conda-cpp:
- # C++ build in conda environment, including the doxygen docs.
- #
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose run --rm conda-cpp
- # Parameters:
- # ARCH: amd64, arm32v7
- image: ${REPO}:${ARCH}-conda-cpp
- build:
- context: .
- dockerfile: ci/docker/conda-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-cpp
- args:
- repo: ${REPO}
- arch: ${ARCH}
- shm_size: &shm-size 2G
- ulimits: &ulimits
- core: ${ULIMIT_CORE}
- environment:
- <<: *ccache
- ARROW_BUILD_BENCHMARKS: "ON"
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_MIMALLOC: "ON"
- ARROW_USE_LD_GOLD: "ON"
- ARROW_USE_PRECOMPILED_HEADERS: "ON"
- volumes: &conda-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
- command: &cpp-conda-command
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build true &&
- /arrow/ci/scripts/cpp_test.sh /arrow /build"]
-
- conda-cpp-valgrind:
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose run --rm conda-cpp-valgrind
- # Parameters:
- # ARCH: amd64, arm32v7
- image: ${REPO}:${ARCH}-conda-cpp
- build:
- context: .
- dockerfile: ci/docker/conda-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-cpp
- args:
- repo: ${REPO}
- arch: ${ARCH}
- prefix: /opt/conda
- shm_size: *shm-size
- environment:
- <<: *ccache
- ARROW_CXXFLAGS: "-Og" # Shrink test runtime by enabling minimal optimizations
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_FLIGHT: "OFF"
- ARROW_GANDIVA: "OFF"
- ARROW_JEMALLOC: "OFF"
- ARROW_RUNTIME_SIMD_LEVEL: "AVX2" # AVX512 not supported by Valgrind (ARROW-9851)
- ARROW_S3: "OFF"
- ARROW_TEST_MEMCHECK: "ON"
- ARROW_USE_LD_GOLD: "ON"
- BUILD_WARNING_LEVEL: "PRODUCTION"
- volumes: *conda-volumes
- command: *cpp-conda-command
-
- debian-cpp:
- # Usage:
- # docker-compose build debian-cpp
- # docker-compose run --rm debian-cpp
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # DEBIAN: 9, 10
- image: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
- build:
- context: .
- dockerfile: ci/docker/debian-${DEBIAN}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
- args:
- arch: ${ARCH}
- llvm: ${LLVM}
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_MIMALLOC: "ON"
- volumes: &debian-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
- command: &cpp-command >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/cpp_test.sh /arrow /build"
-
- ubuntu-cpp:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose run --rm ubuntu-cpp
- # Parameters:
- # ARCH: amd64, arm64v8, s390x, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- build:
- context: .
- dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- args:
- arch: ${ARCH}
- base: "${ARCH}/ubuntu:${UBUNTU}"
- clang_tools: ${CLANG_TOOLS}
- llvm: ${LLVM}
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_MIMALLOC: "ON"
- volumes: &ubuntu-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}ubuntu-ccache:/ccache:delegated
- command: *cpp-command
-
- ubuntu-cuda-cpp:
- # Usage:
- # docker-compose build cuda-cpp
- # docker-compose run --rm cuda-cpp
- # Also need to edit the host docker configuration as follows:
- # https://github.com/docker/compose/issues/6691#issuecomment-561504928
- # Parameters:
- # ARCH: amd64
- # CUDA: 9.1, 10.0, 10.1
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
- build:
- context: .
- dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
- args:
- arch: ${ARCH}
- base: nvidia/cuda:${CUDA}-devel-ubuntu${UBUNTU}
- clang_tools: ${CLANG_TOOLS}
- llvm: ${LLVM}
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_CUDA: "ON"
- volumes: *ubuntu-volumes
- command: *cpp-command
-
- ubuntu-cpp-sanitizer:
- # Usage:
- # docker-compose build ubuntu-cpp-sanitizer
- # docker-compose run --rm ubuntu-cpp-sanitizer
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- cap_add:
- # For LeakSanitizer
- - SYS_PTRACE
- build:
- context: .
- dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- args:
- arch: ${ARCH}
- clang_tools: ${CLANG_TOOLS}
- llvm: ${LLVM}
- shm_size: *shm-size
- volumes: *ubuntu-volumes
- environment:
- <<: *ccache
- CC: clang-${CLANG_TOOLS}
- CXX: clang++-${CLANG_TOOLS}
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_FUZZING: "ON" # Check fuzz regressions
- ARROW_JEMALLOC: "OFF"
- ARROW_ORC: "OFF"
- ARROW_S3: "OFF"
- ARROW_USE_ASAN: "ON"
- ARROW_USE_UBSAN: "ON"
- # utf8proc 2.1.0 in Ubuntu Bionic has test failures
- utf8proc_SOURCE: "BUNDLED"
- command: *cpp-command
-
- ubuntu-cpp-thread-sanitizer:
- # Usage:
- # docker-compose build ubuntu-cpp-thread-sanitizer
- # docker-compose run --rm ubuntu-cpp-thread-sanitizer
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- build:
- context: .
- dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- args:
- arch: ${ARCH}
- clang_tools: ${CLANG_TOOLS}
- llvm: ${LLVM}
- shm_size: *shm-size
- volumes: *ubuntu-volumes
- environment:
- <<: *ccache
- CC: clang-${CLANG_TOOLS}
- CXX: clang++-${CLANG_TOOLS}
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_DATASET: "ON"
- ARROW_JEMALLOC: "OFF"
- ARROW_ORC: "OFF"
- ARROW_S3: "OFF"
- ARROW_USE_TSAN: "ON"
- command: *cpp-command
-
- fedora-cpp:
- # Usage:
- # docker-compose build fedora-cpp
- # docker-compose run --rm fedora-cpp
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # FEDORA: 33
- image: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
- build:
- context: .
- dockerfile: ci/docker/fedora-${FEDORA}-cpp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
- args:
- arch: ${ARCH}
- llvm: ${LLVM}
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_ENABLE_TIMING_TESTS: # inherit
- ARROW_MIMALLOC: "ON"
- volumes: &fedora-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}fedora-ccache:/ccache:delegated
- command: *cpp-command
-
- ############################### C GLib ######################################
-
- debian-c-glib:
- # Usage:
- # docker-compose build debian-cpp
- # docker-compose build debian-c-glib
- # docker-compose run --rm debian-c-glib
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # DEBIAN: 9, 10
- image: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
- build:
- context: .
- dockerfile: ci/docker/linux-apt-c-glib.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
- args:
- base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_GLIB_GTK_DOC: "true"
- volumes: *debian-volumes
- command: &c-glib-command >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
- /arrow/ci/scripts/c_glib_test.sh /arrow /build"
-
- ubuntu-c-glib:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-c-glib
- # docker-compose run --rm ubuntu-c-glib
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
- build:
- context: .
- dockerfile: ci/docker/linux-apt-c-glib.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- ARROW_GLIB_GTK_DOC: "true"
- volumes: *ubuntu-volumes
- command: *c-glib-command
-
- ############################### Ruby ########################################
- # Until Ruby is the only dependent implementation on top of C Glib we can
- # test C Glib and Ruby in one pass. This is an optimization to avoid
- # redundant (one for C GLib and one for Ruby doing the same work twice)
- # builds on CI services.
-
- debian-ruby:
- # Usage:
- # docker-compose build debian-cpp
- # docker-compose build debian-c-glib
- # docker-compose build debian-ruby
- # docker-compose run --rm debian-ruby
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # DEBIAN: 9, 10
- image: ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
- build:
- context: .
- dockerfile: ci/docker/linux-apt-ruby.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
- args:
- base: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- volumes: *debian-volumes
- command: &ruby-command >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
- /arrow/ci/scripts/c_glib_test.sh /arrow /build &&
- /arrow/ci/scripts/ruby_test.sh /arrow /build"
-
- ubuntu-ruby:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-c-glib
- # docker-compose build ubuntu-ruby
- # docker-compose run --rm ubuntu-ruby
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
- build:
- context: .
- dockerfile: ci/docker/linux-apt-ruby.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
- shm_size: *shm-size
- ulimits: *ulimits
- environment:
- <<: *ccache
- volumes: *ubuntu-volumes
- command: *ruby-command
-
- ############################### Python ######################################
-
- conda-python:
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose run --rm conda-python
- # Parameters:
- # ARCH: amd64, arm32v7
- # PYTHON: 3.6, 3.7, 3.8, 3.9
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}
- build:
- context: .
- dockerfile: ci/docker/conda-python.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-volumes
- command: &python-conda-command
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/python_test.sh /arrow"]
-
- ubuntu-cuda-python:
- # Usage:
- # docker-compose build cuda-cpp
- # docker-compose build cuda-python
- # docker-compose run --rm cuda-python
- # Parameters:
- # ARCH: amd64
- # CUDA: 8.0, 10.0, ...
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
- build:
- context: .
- dockerfile: ci/docker/linux-apt-python-3.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- ARROW_CUDA: "ON"
- volumes: *ubuntu-volumes
- command: &python-command >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/python_test.sh /arrow"
-
- debian-python:
- # Usage:
- # docker-compose build debian-cpp
- # docker-compose build debian-python
- # docker-compose run --rm debian-python
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # DEBIAN: 9, 10
- image: ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
- build:
- context: .
- dockerfile: ci/docker/linux-apt-python-3.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
- args:
- base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *debian-volumes
- command: *python-command
-
- ubuntu-python:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-python
- # docker-compose run --rm ubuntu-python
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
- build:
- context: .
- dockerfile: ci/docker/linux-apt-python-3.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *ubuntu-volumes
- command: *python-command
-
- fedora-python:
- # Usage:
- # docker-compose build fedora-cpp
- # docker-compose build fedora-python
- # docker-compose run --rm fedora-python
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # FEDORA: 33
- image: ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
- build:
- context: .
- dockerfile: ci/docker/linux-dnf-python-3.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
- args:
- base: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *fedora-volumes
- command: *python-command
-
- ############################ Python sdist ###################################
-
- python-sdist:
- # Usage:
- # docker-compose build python-sdist
- # docker-compose run --rm python-sdist
- # Parameters:
- # PYARROW_VERSION: The pyarrow version for sdist such as "3.0.0"
- image: ${REPO}:python-sdist
- build:
- context: .
- dockerfile: ci/docker/python-sdist.dockerfile
- cache_from:
- - ${REPO}:python-sdist
- environment:
- PYARROW_VERSION: ${PYARROW_VERSION:-}
- volumes:
- - .:/arrow:delegated
- command: /arrow/ci/scripts/python_sdist_build.sh /arrow
-
- ubuntu-python-sdist-test:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-python-sdist-test
- # docker-compose run --rm ubuntu-python-sdist-test
- # Parameters:
- # ARCH: amd64, arm64v8, ...
- # PYARROW_VERSION: The test target pyarrow version such as "3.0.0"
- # UBUNTU: 18.04, 20.04
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
- build:
- context: .
- dockerfile: ci/docker/linux-apt-python-3.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- PYARROW_VERSION: ${PYARROW_VERSION:-}
- volumes: *ubuntu-volumes
- command: >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_sdist_test.sh /arrow"
-
- ############################ Python wheels ##################################
-
- # See available versions at:
- # https://quay.io/repository/pypa/manylinux2010_x86_64?tab=tags
- # only amd64 arch is supported
- python-wheel-manylinux-2010:
- image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010
- build:
- args:
- arch_alias: ${ARCH_ALIAS}
- arch_short_alias: ${ARCH_SHORT_ALIAS}
- base: quay.io/pypa/manylinux2010_${ARCH_ALIAS}:2020-12-03-912b0de
- vcpkg: ${VCPKG}
- python: ${PYTHON}
- context: .
- dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010
- environment:
- <<: *ccache
- MANYLINUX_VERSION: 2010
- volumes:
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2010-ccache:/ccache:delegated
- command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
-
- # See available versions at:
- # https://quay.io/repository/pypa/manylinux2014_x86_64?tab=tags
- python-wheel-manylinux-2014:
- image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014
- build:
- args:
- arch_alias: ${ARCH_ALIAS}
- arch_short_alias: ${ARCH_SHORT_ALIAS}
- base: quay.io/pypa/manylinux2014_${ARCH_ALIAS}:2020-11-11-bc8ce45
- vcpkg: ${VCPKG}
- python: ${PYTHON}
- context: .
- dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014
- environment:
- <<: *ccache
- MANYLINUX_VERSION: 2014
- volumes:
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
- command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
-
- python-wheel-manylinux-test-imports:
- image: ${ARCH}/python:${PYTHON}
- shm_size: 2G
- volumes:
- - .:/arrow:delegated
- command: /arrow/ci/scripts/python_wheel_manylinux_test.sh imports
-
- python-wheel-manylinux-test-unittests:
- image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
- build:
- args:
- arch: ${ARCH}
- python: ${PYTHON}
- context: .
- dockerfile: ci/docker/python-wheel-manylinux-test.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
- shm_size: 2G
- volumes:
- - .:/arrow:delegated
- command: /arrow/ci/scripts/python_wheel_manylinux_test.sh unittests
-
- python-wheel-windows-vs2017:
- image: ${REPO}:python-${PYTHON}-wheel-windows-vs2017
- build:
- args:
- vcpkg: ${VCPKG}
- python: ${PYTHON}
- context: .
- dockerfile: ci/docker/python-wheel-windows-vs2017.dockerfile
- # This should make the pushed images reusable, but the image gets rebuilt.
- # Uncomment if no local cache is available.
- # cache_from:
- # - mcr.microsoft.com/windows/servercore:ltsc2019
- # - ${REPO}:wheel-windows-vs2017
- volumes:
- - "${DOCKER_VOLUME_PREFIX}python-wheel-windows-clcache:C:/clcache"
- - type: bind
- source: .
- target: "C:/arrow"
- command: arrow\\ci\\scripts\\python_wheel_windows_build.bat
-
- python-wheel-windows-test:
- image: python:${PYTHON}-windowsservercore-1809
- volumes:
- - type: bind
- source: .
- target: "C:/arrow"
- command: arrow\\ci\\scripts\\python_wheel_windows_test.bat
-
- ############################## Integration #################################
-
- conda-python-pandas:
- # Possible $PANDAS parameters:
- # - `latest`: latest release
- # - `master`: git master branch, use `docker-compose run --no-cache`
- # - `<version>`: specific version available on conda-forge
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-pandas
- # docker-compose run --rm conda-python-pandas
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
- build:
- context: .
- dockerfile: ci/docker/conda-python-pandas.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- numpy: ${NUMPY}
- pandas: ${PANDAS}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-volumes
- command: *python-conda-command
-
- conda-python-dask:
- # Possible $DASK parameters:
- # - `latest`: latest release
- # - `master`: git master branch, use `docker-compose run --no-cache`
- # - `<version>`: specific version available on conda-forge
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-dask
- # docker-compose run --rm conda-python-dask
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
- build:
- context: .
- dockerfile: ci/docker/conda-python-dask.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- dask: ${DASK}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_dask.sh"]
-
- conda-python-jpype:
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-jpype
- # docker-compose run --rm conda-python-jpype
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
- build:
- context: .
- dockerfile: ci/docker/conda-python-jpype.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- shm_size: *shm-size
- environment:
- <<: *ccache
- ARROW_FLIGHT: "OFF"
- ARROW_GANDIVA: "OFF"
- volumes: *conda-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/java_build.sh /arrow /build &&
- /arrow/ci/scripts/python_test.sh /arrow"]
-
- conda-python-turbodbc:
- # Possible $TURBODBC parameters:
- # - `latest`: latest release
- # - `master`: git master branch, use `docker-compose run --no-cache`
- # - `<version>`: specific version available under github releases
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-turbodbc
- # docker-compose run --rm conda-python-turbodbc
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
- build:
- context: .
- dockerfile: ci/docker/conda-python-turbodbc.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- turbodbc: ${TURBODBC}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_turbodbc.sh /turbodbc /build"]
-
- conda-python-kartothek:
- # Possible $KARTOTHEK parameters:
- # - `latest`: latest release
- # - `master`: git master branch, use `docker-compose run --no-cache`
- # - `<version>`: specific version available under github releases
- # Usage:
- # docker-compose build conda
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-kartothek
- # docker-compose run --rm conda-python-kartothek
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
- build:
- context: .
- dockerfile: ci/docker/conda-python-kartothek.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- kartothek: ${KARTOTHEK}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_kartothek.sh /kartothek /build"]
-
- ################################## R ########################################
-
- ubuntu-r:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-r
- # docker-compose run ubuntu-r
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
- build:
- context: .
- dockerfile: ci/docker/linux-apt-r.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
- args:
- arch: ${ARCH}
- r: ${R}
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- shm_size: *shm-size
- environment:
- <<: *ccache
- ARROW_R_CXXFLAGS: '-Werror'
- LIBARROW_BUILD: 'false'
- NOT_CRAN: 'true'
- volumes: *ubuntu-volumes
- command: >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/r_test.sh /arrow"
-
- r:
- # This lets you test building/installing the arrow R package
- # (including building the C++ library) on any Docker image that contains R
- #
- # Usage:
- # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose build r
- # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose run r
- image: ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
- build:
- context: .
- dockerfile: ci/docker/linux-r.dockerfile
- cache_from:
- - ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
- args:
- base: ${R_ORG}/${R_IMAGE}:${R_TAG}
- r_dev: ${ARROW_R_DEV}
- devtoolset_version: ${DEVTOOLSET_VERSION}
- shm_size: *shm-size
- environment:
- LIBARROW_DOWNLOAD: "false"
- ARROW_SOURCE_HOME: "/arrow"
- ARROW_R_DEV: ${ARROW_R_DEV}
- # To test for CRAN release, delete ^^ these two env vars so we download the Apache release
- ARROW_USE_PKG_CONFIG: "false"
- devtoolset_version: ${DEVTOOLSET_VERSION}
- volumes:
- - .:/arrow:delegated
- command: >
- /bin/bash -c "/arrow/ci/scripts/r_test.sh /arrow"
-
- ubuntu-r-sanitizer:
- # Only 18.04 and amd64 supported
- # Usage:
- # docker-compose build ubuntu-r-sanitizer
- # docker-compose run ubuntu-r-sanitizer
- image: ${REPO}:amd64-ubuntu-18.04-r-sanitizer
- cap_add:
- # LeakSanitizer and gdb requires ptrace(2)
- - SYS_PTRACE
- build:
- context: .
- dockerfile: ci/docker/linux-r.dockerfile
- cache_from:
- - ${REPO}:amd64-ubuntu-18.04-r-sanitizer
- args:
- base: wch1/r-debug:latest
- r_bin: RDsan
- environment:
- <<: *ccache
- volumes: *ubuntu-volumes
- command: >
- /bin/bash -c "
- /arrow/ci/scripts/r_sanitize.sh /arrow"
-
- ################################# Go ########################################
-
- debian-go:
- # Usage:
- # docker-compose build debian-go
- # docker-compose run debian-go
- image: ${REPO}:${ARCH}-debian-10-go-${GO}
- build:
- context: .
- dockerfile: ci/docker/debian-10-go.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-10-go-${GO}
- args:
- arch: ${ARCH}
- go: ${GO}
- shm_size: *shm-size
- volumes: *debian-volumes
- command: &go-command >
- /bin/bash -c "
- /arrow/ci/scripts/go_build.sh /arrow &&
- /arrow/ci/scripts/go_test.sh /arrow"
-
- ############################# JavaScript ####################################
-
- debian-js:
- # Usage:
- # docker-compose build debian-js
- # docker-compose run debian-js
- image: ${REPO}:${ARCH}-debian-10-js-${NODE}
- build:
- context: .
- dockerfile: ci/docker/debian-10-js.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-10-js-${NODE}
- args:
- arch: ${ARCH}
- node: ${NODE}
- shm_size: *shm-size
- volumes: *debian-volumes
- command: &js-command >
- /bin/bash -c "
- /arrow/ci/scripts/js_build.sh /arrow &&
- /arrow/ci/scripts/js_test.sh /arrow"
-
- #################################### C# #####################################
-
- ubuntu-csharp:
- # Usage:
- # docker-compose build ubuntu-csharp
- # docker-compose run ubuntu-csharp
- image: ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
- build:
- context: .
- dockerfile: ci/docker/ubuntu-18.04-csharp.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
- args:
- dotnet: ${DOTNET}
- platform: bionic # use bionic-arm64v8 for ARM
- shm_size: *shm-size
- volumes: *ubuntu-volumes
- command: &csharp-command >
- /bin/bash -c "
- /arrow/ci/scripts/csharp_build.sh /arrow &&
- /arrow/ci/scripts/csharp_test.sh /arrow &&
- /arrow/ci/scripts/csharp_pack.sh /arrow"
-
- ################################ Java #######################################
-
- debian-java:
- # Usage:
- # docker-compose build debian-java
- # docker-compose run debian-java
- image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
- build:
- context: .
- dockerfile: ci/docker/debian-9-java.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
- args:
- arch: ${ARCH}
- jdk: ${JDK}
- maven: ${MAVEN}
- shm_size: *shm-size
- volumes: &java-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
- command: &java-command >
- /bin/bash -c "
- /arrow/ci/scripts/java_build.sh /arrow /build &&
- /arrow/ci/scripts/java_test.sh /arrow /build"
-
- debian-java-jni:
- # Includes plasma test and jni for gandiva and orc.
- # Usage:
- # docker-compose build debian-java
- # docker-compose build debian-java-jni
- # docker-compose run debian-java-jni
- image: ${REPO}:${ARCH}-debian-9-java-jni
- build:
- context: .
- dockerfile: ci/docker/linux-apt-jni.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-debian-9-java-jni
- args:
- base: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
- llvm: ${LLVM}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes:
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
- - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
- command:
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/java_build.sh /arrow /build &&
- /arrow/ci/scripts/java_test.sh /arrow /build"
-
- ############################## Integration ##################################
-
- conda-integration:
- # Usage:
- # docker-compose build conda-cpp
- # docker-compose build conda-integration
- # docker-compose run conda-integration
- image: ${REPO}:${ARCH}-conda-integration
- build:
- context: .
- dockerfile: ci/docker/conda-integration.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-integration
- args:
- repo: ${REPO}
- arch: ${ARCH}
- jdk: ${JDK}
- # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
- # be set to ${MAVEN}
- maven: 3.5
- node: ${NODE}
- go: ${GO}
- volumes: *conda-volumes
- environment:
- <<: *ccache
- # tell archery where the arrow binaries are located
- ARROW_CPP_EXE_PATH: /build/cpp/debug
- command:
- ["/arrow/ci/scripts/rust_build.sh /arrow /build &&
- /arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/go_build.sh /arrow &&
- /arrow/ci/scripts/java_build.sh /arrow /build &&
- /arrow/ci/scripts/js_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_arrow.sh /arrow /build"]
-
- ################################ Docs #######################################
-
- ubuntu-docs:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-python
- # docker-compose build ubuntu-docs
- # docker-compose run --rm ubuntu-docs
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
- build:
- context: .
- dockerfile: ci/docker/linux-apt-docs.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
- args:
- jdk: ${JDK}
- node: ${NODE}
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
- environment:
- <<: *ccache
- ARROW_CUDA: "ON"
- ARROW_GLIB_GTK_DOC: "true"
- volumes: *ubuntu-volumes
- command: &docs-command >
- /bin/bash -c "
- /arrow/ci/scripts/cpp_build.sh /arrow /build true &&
- /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/java_build.sh /arrow /build true &&
- /arrow/ci/scripts/js_build.sh /arrow true &&
- /arrow/ci/scripts/r_build.sh /arrow true &&
- /arrow/ci/scripts/docs_build.sh /arrow /build"
-
- ################################# Tools #####################################
-
- ubuntu-lint:
- # Usage:
- # docker-compose build ubuntu-cpp
- # docker-compose build ubuntu-lint
- # docker-compose run ubuntu-lint
- image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
- build:
- context: .
- dockerfile: ci/docker/linux-apt-lint.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
- args:
- base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
- clang_tools: ${CLANG_TOOLS}
- rust: ${RUST}
- environment:
- <<: *ccache
- volumes: *ubuntu-volumes
- command: >
- /bin/bash -c "
- pip install -e /arrow/dev/archery &&
- archery lint --all --no-clang-tidy --no-iwyu --no-numpydoc"
-
- ######################### Integration Tests #################################
-
- postgres:
- # required for the impala service
- image: postgres
- ports:
- - 5432:5432
- environment:
- POSTGRES_PASSWORD: postgres
-
- impala:
- # required for the hiveserver and hdfs tests
- image: ibisproject/impala:latest
- hostname: impala
- links:
- - postgres:postgres
- environment:
- PGPASSWORD: postgres
- ports:
- # HDFS
- - 9020:9020
- - 50070:50070
- - 50075:50075
- - 8020:8020
- - 8042:8042
- # Hive
- - 9083:9083
- # Impala
- - 21000:21000
- - 21050:21050
- - 25000:25000
- - 25010:25010
- - 25020:25020
-
- conda-cpp-hiveserver2:
- # Usage:
- # docker-compose build conda-cpp
- # docker-compose build conda-cpp-hiveserver2
- # docker-compose run conda-cpp-hiveserver2
- image: ${REPO}:${ARCH}-conda-cpp
- links:
- - impala:impala
- environment:
- <<: *ccache
- ARROW_FLIGHT: "OFF"
- ARROW_GANDIVA: "OFF"
- ARROW_PLASMA: "OFF"
- ARROW_HIVESERVER2: "ON"
- ARROW_HIVESERVER2_TEST_HOST: impala
- shm_size: *shm-size
- volumes: *conda-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_hiveserver2.sh /arrow /build"]
-
- conda-python-hdfs:
- # Usage:
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-hdfs
- # docker-compose run conda-python-hdfs
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
- build:
- context: .
- dockerfile: ci/docker/conda-python-hdfs.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- jdk: ${JDK}
- # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
- # be set to ${MAVEN}
- maven: 3.5
- hdfs: ${HDFS}
- links:
- - impala:impala
- environment:
- <<: *ccache
- ARROW_HDFS: "ON"
- ARROW_HDFS_TEST_HOST: impala
- ARROW_HDFS_TEST_PORT: 8020
- ARROW_HDFS_TEST_USER: hdfs
- ARROW_S3: "OFF"
- CMAKE_UNITY_BUILD: "ON"
- shm_size: *shm-size
- volumes: &conda-maven-volumes
- - .:/arrow:delegated
- - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
- - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_hdfs.sh /arrow /build"]
-
- conda-python-spark:
- # Usage:
- # docker-compose build conda-cpp
- # docker-compose build conda-python
- # docker-compose build conda-python-spark
- # docker-compose run conda-python-spark
- image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
- build:
- context: .
- dockerfile: ci/docker/conda-python-spark.dockerfile
- cache_from:
- - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
- args:
- repo: ${REPO}
- arch: ${ARCH}
- python: ${PYTHON}
- jdk: ${JDK}
- # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
- # be set to ${MAVEN}
- maven: 3.5
- spark: ${SPARK}
- shm_size: *shm-size
- environment:
- <<: *ccache
- volumes: *conda-maven-volumes
- command:
- ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
- /arrow/ci/scripts/python_build.sh /arrow /build &&
- /arrow/ci/scripts/java_build.sh /arrow /build &&
- /arrow/ci/scripts/integration_spark.sh /arrow /spark ${TEST_PYARROW_ONLY:-false}"]
diff --git a/run-cmake-format.py b/run-cmake-format.py
deleted file mode 100755
index 1ff1038..0000000
--- a/run-cmake-format.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import argparse
-import fnmatch
-import hashlib
-import pathlib
-import subprocess
-import sys
-
-# Keep an explicit list of files to format as we don't want to reformat
-# files we imported from other location.
-PATTERNS = [
- 'ci/**/*.cmake',
- 'cpp/CMakeLists.txt',
- 'cpp/src/**/CMakeLists.txt',
- 'cpp/cmake_modules/*.cmake',
- 'go/**/CMakeLists.txt',
- 'java/**/CMakeLists.txt',
- 'matlab/**/CMakeLists.txt',
-]
-EXCLUDE = [
- 'cpp/cmake_modules/FindNumPy.cmake',
- 'cpp/cmake_modules/FindPythonLibsNew.cmake',
- 'cpp/cmake_modules/UseCython.cmake',
- 'cpp/src/arrow/util/config.h.cmake',
-]
-
-here = pathlib.Path(__file__).parent
-
-
-def find_cmake_files():
- for pat in PATTERNS:
- yield from here.glob(pat)
-
-
-def run_cmake_format(paths):
- # cmake-format is fast enough that running in parallel doesn't seem
- # necessary
- # autosort is off because it breaks in cmake_format 5.1
- # See: https://github.com/cheshirekow/cmake_format/issues/111
- cmd = ['cmake-format', '--in-place', '--autosort=false'] + paths
- try:
- subprocess.run(cmd, check=True)
- except FileNotFoundError:
- try:
- import cmake_format
- except ImportError:
- raise ImportError(
- "Please install cmake-format: `pip install cmake_format`")
- else:
- # Other error, re-raise
- raise
-
-
-def check_cmake_format(paths):
- hashes = {}
- for p in paths:
- contents = p.read_bytes()
- hashes[p] = hashlib.sha256(contents).digest()
-
- run_cmake_format(paths)
-
- # Check contents didn't change
- changed = []
- for p in paths:
- contents = p.read_bytes()
- if hashes[p] != hashlib.sha256(contents).digest():
- changed.append(p)
-
- if changed:
- items = "\n".join("- %s" % p for p in sorted(changed))
- print("The following cmake files need re-formatting:\n%s" % (items,))
- print()
- print("Consider running `run-cmake-format.py`")
- sys.exit(1)
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument('--check', action='store_true')
- parser.add_argument('paths', nargs='*', type=pathlib.Path)
- args = parser.parse_args()
-
- paths = find_cmake_files()
- if args.paths:
- paths = set(paths) & set([path.resolve() for path in args.paths])
- paths = [
- path for path in paths
- if path.relative_to(here).as_posix() not in EXCLUDE
- ]
- if args.check:
- check_cmake_format(paths)
- else:
- run_cmake_format(paths)