You are viewing a plain text version of this content. The canonical link for it is available in the original HTML version of this page.
Posted to commits@arrow.apache.org by uw...@apache.org on 2018/12/09 21:22:17 UTC

[arrow] branch master updated: ARROW-3963: [Packaging/Docker] Nightly test for building the Sphinx documentation

This is an automated email from the ASF dual-hosted git repository.

uwe pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new cc24218  ARROW-3963: [Packaging/Docker] Nightly test for building the Sphinx documentation
cc24218 is described below

commit cc24218ed8a5abe0a8d35cb6fd7ef1a283384be1
Author: Krisztián Szűcs <sz...@gmail.com>
AuthorDate: Sun Dec 9 22:22:08 2018 +0100

    ARROW-3963: [Packaging/Docker] Nightly test for building the Sphinx documentation
    
    Test is here: https://github.com/kszucs/crossbow/branches/all?utf8=%E2%9C%93&query=docker-docs
    
    Author: Krisztián Szűcs <sz...@gmail.com>
    
    Closes #3130 from kszucs/ARROW-3963 and squashes the following commits:
    
    0b5be2cc <Krisztián Szűcs> add docker-docs to docker group
    1575909e <Krisztián Szűcs> path corrections
    51768fc0 <Krisztián Szűcs> use sphinx-build command instead of setup.py
    60635acc <Krisztián Szűcs> error msg
    a93fcad6 <Krisztián Szűcs> merge _as_type and ensure_type
    8d3d58fd <Krisztián Szűcs> nightly test for building cpp and python docs
---
 ci/docker_build_sphinx.sh        |  4 +---
 dev/tasks/tests.yml              | 15 +++++++++++++++
 docker-compose.yml               |  2 +-
 docs/Dockerfile                  |  1 +
 python/pyarrow/gandiva.pyx       | 20 +++++++++++++-------
 python/pyarrow/tests/test_csv.py |  2 +-
 python/pyarrow/types.pxi         | 21 ++++++++-------------
 python/pyarrow/types.py          |  3 +--
 8 files changed, 41 insertions(+), 27 deletions(-)

diff --git a/ci/docker_build_sphinx.sh b/ci/docker_build_sphinx.sh
index 9578043..4a65f81 100755
--- a/ci/docker_build_sphinx.sh
+++ b/ci/docker_build_sphinx.sh
@@ -22,9 +22,7 @@ pushd /arrow/cpp/apidoc
 doxygen
 popd
 
-pushd /arrow/python
-python setup.py build_sphinx -s ../docs/source --build-dir ../docs/_build
-popd
+sphinx-build -b html /arrow/docs/source /arrow/docs/_build/html
 
 mkdir -p /arrow/site/asf-site/docs/latest
 rsync -r /arrow/docs/_build/html/ /arrow/site/asf-site/docs/latest/
diff --git a/dev/tasks/tests.yml b/dev/tasks/tests.yml
index c158481..d51fa7e 100644
--- a/dev/tasks/tests.yml
+++ b/dev/tasks/tests.yml
@@ -31,6 +31,7 @@ groups:
     - docker-python-3.6-alpine
     - docker-java
     - docker-js
+    - docker-docs
     - docker-lint
     - docker-iwyu
     - docker-clang-format
@@ -174,6 +175,20 @@ tasks:
         - docker-compose build python-alpine
         - docker-compose run python-alpine
 
+  ###################### Documentation building tests #########################
+
+  docker-docs:
+    platform: linux
+    template: docker-tests/travis.linux.yml
+    params:
+      environment:
+        PYTHON_VERSION: 3.6
+      commands:
+        - docker-compose build cpp
+        - docker-compose build python
+        - docker-compose build docs
+        - docker-compose run docs
+
   ############################## Linter tests #################################
 
   docker-lint:
diff --git a/docker-compose.yml b/docker-compose.yml
index d6f1100..51f1a49 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -196,7 +196,7 @@ services:
     build:
       context: .
       dockerfile: docs/Dockerfile
-    volumes: *volumes
+    volumes: *ubuntu-volumes
 
   ######################### Integration Tests #################################
 
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 4908110..31ad84e 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -21,6 +21,7 @@ ADD ci/conda_env_sphinx.yml /arrow/ci/
 RUN conda install -c conda-forge \
         --file arrow/ci/conda_env_sphinx.yml && \
     conda clean --all
+
 CMD arrow/ci/docker_build_cpp.sh && \
     arrow/ci/docker_build_python.sh && \
     arrow/ci/docker_build_sphinx.sh
diff --git a/python/pyarrow/gandiva.pyx b/python/pyarrow/gandiva.pyx
index 418d0d6..76e55d6 100644
--- a/python/pyarrow/gandiva.pyx
+++ b/python/pyarrow/gandiva.pyx
@@ -28,10 +28,9 @@ from libc.stdint cimport int64_t, int32_t, uint8_t, uintptr_t
 
 from pyarrow.includes.libarrow cimport *
 from pyarrow.compat import frombytes
-from pyarrow.types import _as_type
 from pyarrow.lib cimport (Array, DataType, Field, MemoryPool, RecordBatch,
                           Schema, check_status, pyarrow_wrap_array,
-                          pyarrow_wrap_data_type)
+                          pyarrow_wrap_data_type, ensure_type)
 
 from pyarrow.includes.libgandiva cimport (
     CCondition, CExpression,
@@ -173,8 +172,10 @@ cdef class Filter:
         return self
 
     def evaluate(self, RecordBatch batch, MemoryPool pool, dtype='int32'):
-        cdef shared_ptr[CSelectionVector] selection
-        cdef DataType type = _as_type(dtype)
+        cdef:
+            DataType type = ensure_type(dtype)
+            shared_ptr[CSelectionVector] selection
+
         if type.id == _Type_INT16:
             check_status(SelectionVector_MakeInt16(
                 batch.num_rows, pool.pool, &selection))
@@ -187,6 +188,7 @@ cdef class Filter:
         else:
             raise ValueError("'dtype' of the selection vector should be "
                              "one of 'int16', 'int32' and 'int64'.")
+
         check_status(self.filter.get().Evaluate(
             batch.sp_batch.get()[0], selection))
         return SelectionVector.create(selection)
@@ -195,8 +197,10 @@ cdef class Filter:
 cdef class TreeExprBuilder:
 
     def make_literal(self, value, dtype):
-        cdef shared_ptr[CNode] r
-        cdef DataType type = _as_type(dtype)
+        cdef:
+            DataType type = ensure_type(dtype)
+            shared_ptr[CNode] r
+
         if type.id == _Type_BOOL:
             r = TreeExprBuilder_MakeBoolLiteral(value)
         elif type.id == _Type_UINT8:
@@ -225,6 +229,7 @@ cdef class TreeExprBuilder:
             r = TreeExprBuilder_MakeBinaryLiteral(value)
         else:
             raise TypeError("Didn't recognize dtype " + str(dtype))
+
         return Node.create(r)
 
     def make_expression(self, Node root_node, Field return_field):
@@ -353,7 +358,8 @@ cdef class TreeExprBuilder:
         return Node.create(r)
 
     def make_in_expression(self, Node node, values, dtype):
-        cdef DataType type = _as_type(dtype)
+        cdef DataType type = ensure_type(dtype)
+
         if type.id == _Type_INT32:
             return self._make_in_expression_int32(node, values)
         elif type.id == _Type_INT64:
diff --git a/python/pyarrow/tests/test_csv.py b/python/pyarrow/tests/test_csv.py
index 115595b..c5816de 100644
--- a/python/pyarrow/tests/test_csv.py
+++ b/python/pyarrow/tests/test_csv.py
@@ -146,7 +146,7 @@ def test_convert_options():
     opts.column_types = [('x', pa.binary())]
     assert opts.column_types == {'x': pa.binary()}
 
-    with pytest.raises(TypeError, match='data type expected'):
+    with pytest.raises(TypeError, match='DataType expected'):
         opts.column_types = {'a': None}
     with pytest.raises(TypeError):
         opts.column_types = 0
diff --git a/python/pyarrow/types.pxi b/python/pyarrow/types.pxi
index d5d99e4..1ebd196 100644
--- a/python/pyarrow/types.pxi
+++ b/python/pyarrow/types.pxi
@@ -869,7 +869,7 @@ def field(name, type, bint nullable=True, dict metadata=None):
     cdef:
         shared_ptr[CKeyValueMetadata] c_meta
         Field result = Field.__new__(Field)
-        DataType _type = _as_type(type)
+        DataType _type = ensure_type(type, allow_none=False)
 
     if metadata is not None:
         convert_metadata(metadata, &c_meta)
@@ -1479,20 +1479,15 @@ def type_for_alias(name):
     return alias()
 
 
-def _as_type(typ):
-    if isinstance(typ, DataType):
-        return typ
-    elif isinstance(typ, six.string_types):
-        return type_for_alias(typ)
-    else:
-        raise TypeError("data type expected, got '%r'" % (type(typ),))
-
-
-cdef DataType ensure_type(object type, c_bool allow_none=False):
-    if allow_none and type is None:
+cdef DataType ensure_type(object ty, c_bool allow_none=False):
+    if allow_none and ty is None:
         return None
+    elif isinstance(ty, DataType):
+        return ty
+    elif isinstance(ty, six.string_types):
+        return type_for_alias(ty)
     else:
-        return _as_type(type)
+        raise TypeError('DataType expected, got {!r}'.format(type(ty)))
 
 
 def schema(fields, dict metadata=None):
diff --git a/python/pyarrow/types.py b/python/pyarrow/types.py
index d07dcca..2bd7027 100644
--- a/python/pyarrow/types.py
+++ b/python/pyarrow/types.py
@@ -19,8 +19,7 @@
 
 from pyarrow.lib import (is_boolean_value,  # noqa
                          is_integer_value,
-                         is_float_value,
-                         _as_type)
+                         is_float_value)
 
 import pyarrow.lib as lib