You are viewing a plain text version of this content. The canonical link is available in the original (HTML) version of this message.
Posted to commits@arrow.apache.org by ap...@apache.org on 2019/04/24 09:09:20 UTC
[arrow] branch master updated: ARROW-5201: [Python] handle
collections.abc deprecation warnings
This is an automated email from the ASF dual-hosted git repository.
apitrou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git
The following commit(s) were added to refs/heads/master by this push:
new 813b4d5 ARROW-5201: [Python] handle collections.abc deprecation warnings
813b4d5 is described below
commit 813b4d512d232fdef7b83c4972c167f2bcdeb7e3
Author: Joris Van den Bossche <jo...@gmail.com>
AuthorDate: Wed Apr 24 11:09:11 2019 +0200
ARROW-5201: [Python] handle collections.abc deprecation warnings
https://issues.apache.org/jira/browse/ARROW-5201
Author: Joris Van den Bossche <jo...@gmail.com>
Closes #4187 from jorisvandenbossche/ARROW-5201-collections-abc and squashes the following commits:
352e93ff9 <Joris Van den Bossche> Merge remote-tracking branch 'upstream/master' into ARROW-5201-collections-abc
14dcec20c <Joris Van den Bossche> trigger appveyor
2b84c3e50 <Joris Van den Bossche> ARROW-5201: handle collections.abc deprecation warnings
---
python/pyarrow/_csv.pyx | 4 +---
python/pyarrow/_plasma.pyx | 3 +--
python/pyarrow/compat.py | 4 ++++
python/pyarrow/pandas_compat.py | 5 ++---
python/pyarrow/tests/test_array.py | 4 ++--
python/pyarrow/tests/test_table.py | 5 +++--
python/pyarrow/types.pxi | 8 +++++---
7 files changed, 18 insertions(+), 15 deletions(-)
diff --git a/python/pyarrow/_csv.pyx b/python/pyarrow/_csv.pyx
index 2932a2d..cfed987 100644
--- a/python/pyarrow/_csv.pyx
+++ b/python/pyarrow/_csv.pyx
@@ -27,9 +27,7 @@ from pyarrow.lib cimport (check_status, Field, MemoryPool, ensure_type,
pyarrow_wrap_table, pyarrow_wrap_data_type,
pyarrow_unwrap_data_type)
-from pyarrow.compat import frombytes, tobytes
-
-from collections import Mapping
+from pyarrow.compat import frombytes, tobytes, Mapping
cdef unsigned char _single_char(s) except 0:
diff --git a/python/pyarrow/_plasma.pyx b/python/pyarrow/_plasma.pyx
index 04963b6..5be724d 100644
--- a/python/pyarrow/_plasma.pyx
+++ b/python/pyarrow/_plasma.pyx
@@ -29,7 +29,6 @@ from libc.stdint cimport int64_t, uint8_t, uintptr_t
from cython.operator cimport dereference as deref, preincrement as inc
from cpython.pycapsule cimport *
-import collections
import random
import socket
import warnings
@@ -524,7 +523,7 @@ cdef class PlasmaClient:
the object_ids and ObjectNotAvailable if the object was not
available.
"""
- if isinstance(object_ids, collections.Sequence):
+ if isinstance(object_ids, compat.Sequence):
results = []
buffers = self.get_buffers(object_ids, timeout_ms)
for i in range(len(object_ids)):
diff --git a/python/pyarrow/compat.py b/python/pyarrow/compat.py
index 824e0b2..0549b16 100644
--- a/python/pyarrow/compat.py
+++ b/python/pyarrow/compat.py
@@ -45,6 +45,8 @@ if PY2:
except ImportError:
from decimal import Decimal
+ from collections import Iterable, Mapping, Sequence
+
unicode_type = unicode
file_type = file
lzip = zip
@@ -81,6 +83,8 @@ else:
except ImportError:
import pickle as builtin_pickle
+ from collections.abc import Iterable, Mapping, Sequence
+
unicode_type = str
file_type = None
def lzip(*x):
diff --git a/python/pyarrow/pandas_compat.py b/python/pyarrow/pandas_compat.py
index c4dd9d7..7712b89 100644
--- a/python/pyarrow/pandas_compat.py
+++ b/python/pyarrow/pandas_compat.py
@@ -16,7 +16,6 @@
# under the License.
import ast
-import collections
import json
import operator
import re
@@ -27,7 +26,7 @@ import six
import pyarrow as pa
from pyarrow.lib import _pandas_api
-from pyarrow.compat import (builtin_pickle, PY2, zip_longest) # noqa
+from pyarrow.compat import (builtin_pickle, PY2, zip_longest, Sequence) # noqa
_logical_type_map = {}
@@ -291,7 +290,7 @@ def _column_name_to_strings(name):
return name.decode('utf8')
elif isinstance(name, tuple):
return str(tuple(map(_column_name_to_strings, name)))
- elif isinstance(name, collections.Sequence):
+ elif isinstance(name, Sequence):
raise TypeError("Unsupported type for MultiIndex level")
elif name is None:
return None
diff --git a/python/pyarrow/tests/test_array.py b/python/pyarrow/tests/test_array.py
index ffbf7e3..476740d 100644
--- a/python/pyarrow/tests/test_array.py
+++ b/python/pyarrow/tests/test_array.py
@@ -16,7 +16,6 @@
# specific language governing permissions and limitations
# under the License.
-import collections
import datetime
import hypothesis as h
import hypothesis.strategies as st
@@ -34,6 +33,7 @@ except ImportError:
import pyarrow as pa
import pyarrow.tests.strategies as past
+from pyarrow import compat
def test_total_bytes_allocated():
@@ -255,7 +255,7 @@ def test_array_iter():
for i, j in zip(range(10), arr):
assert i == j
- assert isinstance(arr, collections.Iterable)
+ assert isinstance(arr, compat.Iterable)
def test_struct_array_slice():
diff --git a/python/pyarrow/tests/test_table.py b/python/pyarrow/tests/test_table.py
index 1605579..56c3592 100644
--- a/python/pyarrow/tests/test_table.py
+++ b/python/pyarrow/tests/test_table.py
@@ -15,13 +15,14 @@
# specific language governing permissions and limitations
# under the License.
-from collections import OrderedDict, Iterable
+from collections import OrderedDict
import pickle
import sys
import numpy as np
import pytest
import pyarrow as pa
+from pyarrow import compat
def test_chunked_array_basics():
@@ -100,7 +101,7 @@ def test_chunked_array_iter():
for i, j in zip(range(10), arr):
assert i == j
- assert isinstance(arr, Iterable)
+ assert isinstance(arr, compat.Iterable)
def test_chunked_array_equals():
diff --git a/python/pyarrow/types.pxi b/python/pyarrow/types.pxi
index 4b1f2f4..b71fc8c 100644
--- a/python/pyarrow/types.pxi
+++ b/python/pyarrow/types.pxi
@@ -15,10 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-import collections
import re
import warnings
+from pyarrow import compat
+
+
# These are imprecise because the type (in pandas 0.x) depends on the presence
# of nulls
cdef dict _pandas_type_map = {
@@ -1486,7 +1488,7 @@ def struct(fields):
vector[shared_ptr[CField]] c_fields
cdef shared_ptr[CDataType] struct_type
- if isinstance(fields, collections.Mapping):
+ if isinstance(fields, compat.Mapping):
fields = fields.items()
for item in fields:
@@ -1656,7 +1658,7 @@ def schema(fields, metadata=None):
Field py_field
vector[shared_ptr[CField]] c_fields
- if isinstance(fields, collections.Mapping):
+ if isinstance(fields, compat.Mapping):
fields = fields.items()
for item in fields: