You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ignite.apache.org by is...@apache.org on 2018/10/15 10:29:52 UTC
[1/6] ignite git commit: IGNITE-7782 Python thin client
Repository: ignite
Updated Branches:
refs/heads/master eeb25e63b -> 7e547b139
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/queries/__init__.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/queries/__init__.py b/modules/platforms/python/pyignite/queries/__init__.py
new file mode 100644
index 0000000..f43d60e
--- /dev/null
+++ b/modules/platforms/python/pyignite/queries/__init__.py
@@ -0,0 +1,339 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module is a source of some basic information about the binary protocol.
+
+Most importantly, it contains `Query` and `Response` base classes. They are
+used internally by :mod:`pyignite.api` module. They are, in turn, based on
+:mod:`pyignite.datatypes` binary parser/generator classes.
+"""
+
+from collections import OrderedDict
+import ctypes
+from random import randint
+
+import attr
+
+from pyignite.api.result import APIResult
+from pyignite.constants import *
+from pyignite.datatypes import (
+ AnyDataObject, Bool, Int, Long, String, StringArray, Struct,
+)
+from .op_codes import *
+
+
@attr.s
class Response:
    """
    Read and decode a response from an Ignite server.

    `following` describes the expected payload: an ordered list of
    ``(name, ignite_type)`` pairs parsed after the standard response header.
    """
    following = attr.ib(type=list, factory=list)
    # lazily-built ctypes structure of the common response header,
    # cached on the class so it is constructed only once
    _response_header = None

    def __attrs_post_init__(self):
        # replace None with empty list
        self.following = self.following or []

    @classmethod
    def build_header(cls):
        """
        Build (and cache) the ctypes structure of the standard response
        header: message length, query ID and status code.
        """
        if cls._response_header is None:
            cls._response_header = type(
                'ResponseHeader',
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('length', ctypes.c_int),
                        # the protocol's query ID is a fixed 8-byte field;
                        # c_longlong is 8 bytes on every platform, whereas
                        # c_long is only 4 bytes on Windows
                        ('query_id', ctypes.c_longlong),
                        ('status_code', ctypes.c_int),
                    ],
                },
            )
        return cls._response_header

    def parse(self, client: 'Client'):
        """
        Read a response from the socket and build a ctypes class
        describing it.

        :param client: connection to Ignite server,
        :return: tuple of the dynamically created response class and the
         raw bytes it was parsed from.
        """
        header_class = self.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)
        fields = []

        if header.status_code == OP_SUCCESS:
            # success: the payload is the sequence of expected fields
            for name, ignite_type in self.following:
                c_type, buffer_fragment = ignite_type.parse(client)
                buffer += buffer_fragment
                fields.append((name, c_type))
        else:
            # error: the payload is a single error message string
            c_type, buffer_fragment = String.parse(client)
            buffer += buffer_fragment
            fields.append(('error_message', c_type))

        response_class = type(
            'Response',
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': fields,
            }
        )
        return response_class, buffer

    def to_python(self, ctype_object, *args, **kwargs):
        """
        Convert the parsed ctypes object to plain Python data, field by
        field. Returns None when no fields were expected.
        """
        result = OrderedDict()

        for name, c_type in self.following:
            result[name] = c_type.to_python(
                getattr(ctype_object, name),
                *args, **kwargs
            )

        return result if result else None
+
+
@attr.s
class SQLResponse(Response):
    """
    The response class of SQL functions is special in the way the row-column
    data is counted in it. Basically, Ignite thin client API is following a
    “counter right before the counted objects” rule in most of its parts.
    SQL ops are breaking this rule.
    """
    # whether the server was asked to include column names in the response
    include_field_names = attr.ib(type=bool, default=False)
    # whether the response carries a cursor ID (first page of a query)
    has_cursor = attr.ib(type=bool, default=False)

    def fields_or_field_count(self):
        """
        Return the (name, parser) pair for the body part that either lists
        the column names or merely counts the columns.
        """
        if self.include_field_names:
            return 'fields', StringArray
        return 'field_count', Int

    def parse(self, client: 'Client'):
        """
        Read an SQL response from the socket and build a ctypes class
        describing it: header, body (cursor/fields/row count), nested row
        data, and the trailing `more` flag.
        """
        header_class = self.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)
        fields = []

        if header.status_code == OP_SUCCESS:
            # body layout: [cursor,] fields-or-field-count, row count
            following = [
                self.fields_or_field_count(),
                ('row_count', Int),
            ]
            if self.has_cursor:
                following.insert(0, ('cursor', Long))
            body_struct = Struct(following)
            body_class, body_buffer = body_struct.parse(client)
            body = body_class.from_buffer_copy(body_buffer)

            if self.include_field_names:
                field_count = body.fields.length
            else:
                field_count = body.field_count

            # parse row_count × field_count data objects into nested
            # ctypes structures: one structure per row, all gathered
            # under a single `data` structure
            data_fields = []
            data_buffer = b''
            for i in range(body.row_count):
                row_fields = []
                row_buffer = b''
                for j in range(field_count):
                    field_class, field_buffer = AnyDataObject.parse(client)
                    row_fields.append(('column_{}'.format(j), field_class))
                    row_buffer += field_buffer

                row_class = type(
                    'SQLResponseRow',
                    (ctypes.LittleEndianStructure,),
                    {
                        '_pack_': 1,
                        '_fields_': row_fields,
                    }
                )
                data_fields.append(('row_{}'.format(i), row_class))
                data_buffer += row_buffer

            data_class = type(
                'SQLResponseData',
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': data_fields,
                }
            )
            fields += body_class._fields_ + [
                ('data', data_class),
                ('more', ctypes.c_bool),
            ]
            buffer += body_buffer + data_buffer
        else:
            # error responses carry just a message string
            c_type, buffer_fragment = String.parse(client)
            buffer += buffer_fragment
            fields.append(('error_message', c_type))

        final_class = type(
            'SQLResponse',
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': fields,
            }
        )
        # read whatever remains of the message (e.g. the trailing `more`
        # flag) that the per-field parsers above did not consume
        buffer += client.recv(ctypes.sizeof(final_class) - len(buffer))
        return final_class, buffer

    def to_python(self, ctype_object, *args, **kwargs):
        """
        Convert the parsed SQL response into a dict of plain Python values.
        Returns None implicitly when the status code is non-zero.
        """
        if ctype_object.status_code == 0:
            result = {
                'more': Bool.to_python(
                    ctype_object.more, *args, **kwargs
                ),
                'data': [],
            }
            if hasattr(ctype_object, 'fields'):
                result['fields'] = StringArray.to_python(
                    ctype_object.fields, *args, **kwargs
                )
            else:
                result['field_count'] = Int.to_python(
                    ctype_object.field_count, *args, **kwargs
                )
            if hasattr(ctype_object, 'cursor'):
                result['cursor'] = Long.to_python(
                    ctype_object.cursor, *args, **kwargs
                )
            # unpack the nested row/column ctypes structures built in parse()
            for row_item in ctype_object.data._fields_:
                row_name = row_item[0]
                row_object = getattr(ctype_object.data, row_name)
                row = []
                for col_item in row_object._fields_:
                    col_name = col_item[0]
                    col_object = getattr(row_object, col_name)
                    row.append(
                        AnyDataObject.to_python(col_object, *args, **kwargs)
                    )
                result['data'].append(row)
            return result
+
+
@attr.s
class Query:
    """
    A request to an Ignite server: an operation code plus an ordered list
    of ``(name, ignite_type)`` payload fields.
    """
    op_code = attr.ib(type=int)
    following = attr.ib(type=list, factory=list)
    query_id = attr.ib(type=int, default=None)
    # lazily-built ctypes structure of the request header, cached per class
    _query_c_type = None

    @classmethod
    def build_c_type(cls):
        """
        Build (and cache) the ctypes structure of the request header:
        message length, operation code and query ID.
        """
        if cls._query_c_type is None:
            cls._query_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('length', ctypes.c_int),
                        ('op_code', ctypes.c_short),
                        # fixed 8-byte protocol field: c_longlong is 8 bytes
                        # everywhere, unlike c_long (4 bytes on Windows)
                        ('query_id', ctypes.c_longlong),
                    ],
                },
            )
        return cls._query_c_type

    def from_python(self, values: dict=None):
        """
        Serialize the query to bytes.

        :param values: (optional) dict of payload field values, keyed by
         the names given in `following`,
        :return: tuple of query ID and the serialized query.
        """
        if values is None:
            values = {}
        buffer = b''

        header_class = self.build_c_type()
        header = header_class()
        header.op_code = self.op_code
        if self.query_id is None:
            header.query_id = randint(MIN_LONG, MAX_LONG)
        else:
            # previously an explicitly supplied query_id was silently
            # ignored, leaving the header field zeroed
            header.query_id = self.query_id

        for name, c_type in self.following:
            buffer += c_type.from_python(values[name])

        # `length` counts everything after the length field itself
        header.length = (
            len(buffer)
            + ctypes.sizeof(header_class)
            - ctypes.sizeof(ctypes.c_int)
        )
        return header.query_id, bytes(header) + buffer

    def perform(
        self, conn: 'Connection', query_params: dict=None,
        response_config: list=None,
    ) -> APIResult:
        """
        Perform query and process result.

        :param conn: connection to Ignite server,
        :param query_params: (optional) dict of named query parameters.
         Defaults to no parameters,
        :param response_config: (optional) response configuration − list of
         (name, type_hint) tuples. Defaults to empty return value,
        :return: instance of :class:`~pyignite.api.result.APIResult` with raw
         value (may undergo further processing in API functions).
        """
        _, send_buffer = self.from_python(query_params)
        conn.send(send_buffer)
        response_struct = Response(response_config)
        response_ctype, recv_buffer = response_struct.parse(conn)
        response = response_ctype.from_buffer_copy(recv_buffer)
        result = APIResult(response)
        if result.status == 0:
            result.value = response_struct.to_python(response)
        return result
+
+
class ConfigQuery(Query):
    """
    This is a special query, used for creating caches with configuration.
    Its header carries an extra `config_length` field.
    """
    # own cache slot: the extended header must not reuse Query's cached type
    _query_c_type = None

    @classmethod
    def build_c_type(cls):
        """
        Build (and cache) the ctypes structure of the extended request
        header: length, op code, query ID and configuration length.
        """
        if cls._query_c_type is None:
            cls._query_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('length', ctypes.c_int),
                        ('op_code', ctypes.c_short),
                        # fixed 8-byte protocol field: c_longlong is 8 bytes
                        # everywhere, unlike c_long (4 bytes on Windows)
                        ('query_id', ctypes.c_longlong),
                        ('config_length', ctypes.c_int),
                    ],
                },
            )
        return cls._query_c_type

    def from_python(self, values: dict = None):
        """
        Serialize the query to bytes.

        :param values: (optional) dict of payload field values, keyed by
         the names given in `following`,
        :return: tuple of query ID and the serialized query.
        """
        if values is None:
            values = {}
        buffer = b''

        header_class = self.build_c_type()
        header = header_class()
        header.op_code = self.op_code
        if self.query_id is None:
            header.query_id = randint(MIN_LONG, MAX_LONG)
        else:
            # use an explicitly supplied query ID instead of silently
            # leaving the header field zeroed
            header.query_id = self.query_id

        for name, c_type in self.following:
            buffer += c_type.from_python(values[name])

        header.length = (
            len(buffer)
            + ctypes.sizeof(header_class)
            - ctypes.sizeof(ctypes.c_int)
        )
        # NOTE(review): this works out to len(buffer) - sizeof(c_int);
        # confirm against the protocol that the configuration length is
        # meant to exclude one int of the payload
        header.config_length = header.length - ctypes.sizeof(header_class)
        return header.query_id, bytes(header) + buffer
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/queries/op_codes.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/queries/op_codes.py b/modules/platforms/python/pyignite/queries/op_codes.py
new file mode 100644
index 0000000..1396e83
--- /dev/null
+++ b/modules/platforms/python/pyignite/queries/op_codes.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Named constants that represent request operation codes. These are the way
+of telling the Ignite server what one wants to do in a request.
+"""
+
OP_SUCCESS = 0

OP_RESOURCE_CLOSE = 0

# key-value cache operations
OP_CACHE_GET = 1000
OP_CACHE_PUT = 1001
OP_CACHE_PUT_IF_ABSENT = 1002
OP_CACHE_GET_ALL = 1003
OP_CACHE_PUT_ALL = 1004
OP_CACHE_GET_AND_PUT = 1005
OP_CACHE_GET_AND_REPLACE = 1006
OP_CACHE_GET_AND_REMOVE = 1007
OP_CACHE_GET_AND_PUT_IF_ABSENT = 1008
OP_CACHE_REPLACE = 1009
OP_CACHE_REPLACE_IF_EQUALS = 1010
OP_CACHE_CONTAINS_KEY = 1011
OP_CACHE_CONTAINS_KEYS = 1012
OP_CACHE_CLEAR = 1013
OP_CACHE_CLEAR_KEY = 1014
OP_CACHE_CLEAR_KEYS = 1015
OP_CACHE_REMOVE_KEY = 1016
OP_CACHE_REMOVE_IF_EQUALS = 1017
OP_CACHE_REMOVE_KEYS = 1018
OP_CACHE_REMOVE_ALL = 1019
OP_CACHE_GET_SIZE = 1020

# cache configuration operations
OP_CACHE_GET_NAMES = 1050
OP_CACHE_CREATE_WITH_NAME = 1051
OP_CACHE_GET_OR_CREATE_WITH_NAME = 1052
OP_CACHE_CREATE_WITH_CONFIGURATION = 1053
OP_CACHE_GET_OR_CREATE_WITH_CONFIGURATION = 1054
OP_CACHE_GET_CONFIGURATION = 1055
OP_CACHE_DESTROY = 1056

# scan and SQL query operations
OP_QUERY_SCAN = 2000
OP_QUERY_SCAN_CURSOR_GET_PAGE = 2001
OP_QUERY_SQL = 2002
OP_QUERY_SQL_CURSOR_GET_PAGE = 2003
OP_QUERY_SQL_FIELDS = 2004
OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE = 2005

# binary type registry operations
OP_GET_BINARY_TYPE_NAME = 3000  # fixed: was misspelled `P_GET_BINARY_TYPE_NAME`
OP_REGISTER_BINARY_TYPE_NAME = 3001
OP_GET_BINARY_TYPE = 3002
OP_PUT_BINARY_TYPE = 3003

# backward-compatible alias for the original misspelled constant name
P_GET_BINARY_TYPE_NAME = OP_GET_BINARY_TYPE_NAME
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/utils.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/utils.py b/modules/platforms/python/pyignite/utils.py
new file mode 100644
index 0000000..a08bc9b
--- /dev/null
+++ b/modules/platforms/python/pyignite/utils.py
@@ -0,0 +1,168 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import wraps
+from typing import Any, Type, Union
+
+from .constants import *
+
+
def is_iterable(value):
    """ Check if value is iterable. """
    try:
        iter(value)
    except TypeError:
        return False
    return True
+
+
def is_binary(value):
    """
    Check if a value is a pythonic representation of a Complex object.
    """
    # a Complex object exposes all four of these attributes
    required_attrs = ('type_name', 'type_id', 'schema', 'schema_id')
    return all(hasattr(value, attr_name) for attr_name in required_attrs)
+
+
def is_hinted(value):
    """
    Check if a value is a tuple of data item and its type hint.
    """
    # NOTE(review): `isinstance(value[1], object)` is always True in
    # Python, so effectively only the 2-tuple shape is checked here —
    # confirm whether the hint was meant to be validated as a parser class
    if not isinstance(value, tuple):
        return False
    return len(value) == 2 and isinstance(value[1], object)
+
+
def is_wrapped(value: Any) -> bool:
    """
    Check if a value is a (bytes, offset) pair, i.e. a wrapped data object.
    """
    if type(value) is not tuple or len(value) != 2:
        return False
    blob, offset = value
    return type(blob) is bytes and type(offset) is int
+
+
def int_overflow(value: int) -> int:
    """
    Simulates 32bit integer overflow.
    """
    # keep the low 32 bits, then reinterpret as a signed value
    wrapped = value & 0xffffffff
    if wrapped >= 0x80000000:
        wrapped -= 0x100000000
    return wrapped
+
+
def unwrap_binary(client: 'Client', wrapped: tuple):
    """
    Unwrap wrapped BinaryObject and convert it to Python data.

    :param client: connection to Ignite cluster,
    :param wrapped: `WrappedDataObject` value,
    :return: dict representing wrapped BinaryObject.
    """
    # local import to avoid a circular dependency with datatypes.complex
    from pyignite.datatypes.complex import BinaryObject

    blob, offset = wrapped
    # parse the blob through a cloned connection so the real socket state
    # is left untouched; `offset` points at the object's start in the blob
    client_clone = client.clone(prefetch=blob)
    client_clone.pos = offset
    data_class, data_bytes = BinaryObject.parse(client_clone)
    return BinaryObject.to_python(
        data_class.from_buffer_copy(data_bytes),
        client,
    )
+
+
def hashcode(string: Union[str, bytes]) -> int:
    """
    Calculate hash code used for identifying objects in Ignite binary API.
    The algorithm matches Java's `String.hashCode()` over the characters.

    :param string: UTF-8-encoded string identifier of binary buffer
     (`str` or `bytes`),
    :return: hash code.
    """
    result = 0
    for char in string:
        # iterating a `str` yields 1-character strings, iterating `bytes`
        # yields ints; normalize with an explicit type check instead of
        # the previous per-character try/except, which is slow in a tight
        # loop and could mask unrelated TypeErrors in the arithmetic below
        if isinstance(char, str):
            char = ord(char)
        result = int_overflow(31 * result + char)
    return result
+
+
def cache_id(cache: Union[str, int]) -> int:
    """
    Create a cache ID from cache name.

    :param cache: cache name or ID,
    :return: cache ID.
    """
    # numeric IDs pass through untouched; names get hashed
    if type(cache) is int:
        return cache
    return hashcode(cache)
+
+
def entity_id(cache: Union[str, int]) -> int:
    """
    Create a type ID from type name or field ID from field name.

    :param cache: entity name or ID,
    :return: entity ID.
    """
    # numeric IDs pass through untouched
    if type(cache) is int:
        return cache
    # entity IDs are case-insensitive: hash the lower-cased name
    return hashcode(cache.lower())
+
+
def schema_id(schema: Union[int, dict]) -> int:
    """
    Calculate Complex Object schema ID.

    :param schema: a dict of field names: field types,
    :return: schema ID.
    """
    if type(schema) is int:
        return schema
    if schema is None:
        return 0
    s_id = FNV1_OFFSET_BASIS if schema else 0
    # FNV-1 hash over the little-endian bytes of each field ID,
    # wrapped to 32 bits after every multiplication
    for field_name in schema.keys():
        field_id = entity_id(field_name)
        for shift in (0, 8, 16, 24):
            s_id ^= (field_id >> shift) & 0xff
            s_id = int_overflow(s_id * FNV1_PRIME)
    return s_id
+
+
def status_to_exception(exc: Type[Exception]):
    """
    Converts erroneous status code with error message to an exception
    of the given class.

    :param exc: the class of exception to raise,
    :return: decorator.
    """
    def ste_decorator(fn):
        @wraps(fn)
        def ste_wrapper(*args, **kwargs):
            outcome = fn(*args, **kwargs)
            # a zero status means success: hand back the payload only
            if outcome.status == 0:
                return outcome.value
            raise exc(outcome.message)
        return ste_wrapper
    return ste_decorator
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/requirements/docs.txt
----------------------------------------------------------------------
diff --git a/modules/platforms/python/requirements/docs.txt b/modules/platforms/python/requirements/docs.txt
new file mode 100644
index 0000000..75ab231
--- /dev/null
+++ b/modules/platforms/python/requirements/docs.txt
@@ -0,0 +1,6 @@
+# these packages are required for documentation building
+# (look up the prebuilt docs in `docs/generated`)
+
+-r install.txt
+Sphinx==1.7.5
+sphinxcontrib-fulltoc==1.2.0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/requirements/install.txt
----------------------------------------------------------------------
diff --git a/modules/platforms/python/requirements/install.txt b/modules/platforms/python/requirements/install.txt
new file mode 100644
index 0000000..9b87ae8
--- /dev/null
+++ b/modules/platforms/python/requirements/install.txt
@@ -0,0 +1,4 @@
+# these pip packages are necessary for the pyignite to run
+
+typing==3.6.6; python_version<'3.5'
+attrs==18.1.0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/requirements/setup.txt
----------------------------------------------------------------------
diff --git a/modules/platforms/python/requirements/setup.txt b/modules/platforms/python/requirements/setup.txt
new file mode 100644
index 0000000..7c55f83
--- /dev/null
+++ b/modules/platforms/python/requirements/setup.txt
@@ -0,0 +1,3 @@
+# additional package for integrating pytest in setuptools
+
+pytest-runner==4.2
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/requirements/tests.txt
----------------------------------------------------------------------
diff --git a/modules/platforms/python/requirements/tests.txt b/modules/platforms/python/requirements/tests.txt
new file mode 100644
index 0000000..c107c8b
--- /dev/null
+++ b/modules/platforms/python/requirements/tests.txt
@@ -0,0 +1,5 @@
+# these packages are used for testing
+
+pytest==3.6.1
+pytest-cov==2.5.1
+teamcity-messages==1.21
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/setup.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/setup.py b/modules/platforms/python/setup.py
new file mode 100644
index 0000000..403b170
--- /dev/null
+++ b/modules/platforms/python/setup.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import defaultdict
+import setuptools
+import sys
+
+
# minimum supported interpreter version vs the one actually running
PYTHON_REQUIRED = (3, 4)
PYTHON_INSTALLED = sys.version_info[:2]

# refuse to install on unsupported interpreters with a clear message
if PYTHON_INSTALLED < PYTHON_REQUIRED:
    sys.stderr.write('''

`pyignite` is not compatible with Python {}.{}!
Use Python {}.{} or above.


'''.format(
        PYTHON_INSTALLED[0],
        PYTHON_INSTALLED[1],
        PYTHON_REQUIRED[0],
        PYTHON_REQUIRED[1],
    )
    )
    sys.exit(1)
+
+
def is_a_requirement(line):
    """
    Tell whether a requirements-file line names a package to install
    (as opposed to a comment, an `-r` include, or a blank line).
    """
    return bool(line) and not line.startswith(('#', '-r'))
+
+
# one requirements file per distribution "extra"
requirement_sections = [
    'install',
    'setup',
    'tests',
    'docs',
]
requirements = defaultdict(list)

# collect requirement specifiers, skipping comments, includes and blanks
for section in requirement_sections:
    with open('requirements/{}.txt'.format(section), 'r') as requirements_file:
        for line in requirements_file.readlines():
            line = line.strip('\n')
            if is_a_requirement(line):
                requirements[section].append(line)

# reuse the README as the PyPI long description
with open('README.md', 'r') as readme_file:
    long_description = readme_file.read()

setuptools.setup(
    name='pyignite',
    version='0.3.1',
    python_requires='>={}.{}'.format(*PYTHON_REQUIRED),
    author='Dmitry Melnichuk',
    author_email='dmitry.melnichuk@nobitlost.com',
    description='Apache Ignite binary client Python API',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url=(
        'https://github.com/apache/ignite/tree/master'
        '/modules/platforms/python'
    ),
    packages=setuptools.find_packages(),
    install_requires=requirements['install'],
    tests_require=requirements['tests'],
    setup_requires=requirements['setup'],
    extras_require={
        'docs': requirements['docs'],
    },
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3 :: Only',
        'Intended Audience :: Developers',
        'Topic :: Database :: Front-Ends',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
    ],
)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/conftest.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/conftest.py b/modules/platforms/python/tests/conftest.py
new file mode 100644
index 0000000..be6d029
--- /dev/null
+++ b/modules/platforms/python/tests/conftest.py
@@ -0,0 +1,218 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+from distutils.util import strtobool
+import ssl
+
+import pytest
+
+from pyignite import Client
+from pyignite.constants import *
+from pyignite.api import cache_create, cache_get_names, cache_destroy
+
+
class UseSSLParser(argparse.Action):
    """
    Parse the `--use-ssl` option: a bare flag means True, otherwise the
    supplied word is interpreted as a boolean value.

    Implements the boolean parsing inline (same accepted spellings as
    `distutils.util.strtobool`), because distutils is deprecated by
    PEP 632 and removed in Python 3.12.
    """
    _TRUE_VALUES = frozenset(('y', 'yes', 't', 'true', 'on', '1'))
    _FALSE_VALUES = frozenset(('n', 'no', 'f', 'false', 'off', '0'))

    def __call__(self, parser, namespace, values, option_string=None):
        if values is None:
            # `--use-ssl` given with no argument enables SSL
            result = True
        else:
            lowered = values.lower()
            if lowered in self._TRUE_VALUES:
                result = True
            elif lowered in self._FALSE_VALUES:
                result = False
            else:
                raise ValueError(
                    'invalid truth value {!r}'.format(values)
                )
        setattr(namespace, self.dest, result)
+
+
class CertReqsParser(argparse.Action):
    """
    Translate the `--ssl-cert-reqs` option value (NONE, OPTIONAL or
    REQUIRED, case-insensitive) into the matching `ssl` module constant.
    """
    conv_map = {
        'NONE': ssl.CERT_NONE,
        'OPTIONAL': ssl.CERT_OPTIONAL,
        'REQUIRED': ssl.CERT_REQUIRED,
    }

    def __call__(self, parser, namespace, values, option_string=None):
        key = values.upper()
        # reject anything outside the known certificate-requirement modes
        if key not in self.conv_map:
            raise ValueError(
                'Undefined argument: --ssl-cert-reqs={}'.format(key)
            )
        setattr(namespace, self.dest, self.conv_map[key])
+
+
class SSLVersionParser(argparse.Action):
    """
    Translate the `--ssl-version` option value (TLSV1_1 or TLSV1_2,
    case-insensitive) into the matching `ssl` protocol constant.
    """
    conv_map = {
        'TLSV1_1': ssl.PROTOCOL_TLSv1_1,
        'TLSV1_2': ssl.PROTOCOL_TLSv1_2,
    }

    def __call__(self, parser, namespace, values, option_string=None):
        key = values.upper()
        # reject anything outside the supported TLS versions
        if key not in self.conv_map:
            raise ValueError(
                'Undefined argument: --ssl-version={}'.format(key)
            )
        setattr(namespace, self.dest, self.conv_map[key])
+
+
@pytest.fixture(scope='module')
def client(
    ignite_host, ignite_port, timeout, use_ssl, ssl_keyfile, ssl_certfile,
    ssl_ca_certfile, ssl_cert_reqs, ssl_ciphers, ssl_version,
    username, password,
):
    """
    Module-scoped Ignite client, configured from the session parameters
    injected by `pytest_generate_tests`. On teardown, destroys every cache
    left behind by the tests and closes the connection.
    """
    client = Client(
        timeout=timeout,
        use_ssl=use_ssl,
        ssl_keyfile=ssl_keyfile,
        ssl_certfile=ssl_certfile,
        ssl_ca_certfile=ssl_ca_certfile,
        ssl_cert_reqs=ssl_cert_reqs,
        ssl_ciphers=ssl_ciphers,
        ssl_version=ssl_version,
        username=username,
        password=password,
    )
    client.connect(ignite_host, ignite_port)
    yield client
    # teardown: drop all remaining caches, then disconnect
    for cache_name in cache_get_names(client).value:
        cache_destroy(client, cache_name)
    client.close()
+
+
@pytest.fixture
def cache(client):
    """Create a disposable test cache, yield its name, drop it on teardown."""
    name = 'my_bucket'
    cache_create(client, name)
    yield name
    cache_destroy(client, name)
+
+
def pytest_addoption(parser):
    """
    Register the command line options that configure the test connection
    to the Ignite server (host/port, timeout, SSL settings, credentials).
    """
    parser.addoption(
        '--ignite-host',
        action='append',
        default=[IGNITE_DEFAULT_HOST],
        help='Ignite binary protocol test server host (default: localhost)'
    )
    parser.addoption(
        '--ignite-port',
        action='append',
        default=[IGNITE_DEFAULT_PORT],
        type=int,
        help='Ignite binary protocol test server port (default: 10800)'
    )
    parser.addoption(
        '--timeout',
        action='store',
        type=float,
        default=None,
        help=(
            'Timeout (in seconds) for each socket operation. Can accept '
            'integer or float value. Default is None'
        )
    )
    parser.addoption(
        '--use-ssl',
        action=UseSSLParser,
        nargs='?',
        default=False,
        help='Use SSL encryption'
    )
    parser.addoption(
        '--ssl-keyfile',
        action='store',
        default=None,
        type=str,
        help='a path to SSL key file to identify local party'
    )
    parser.addoption(
        '--ssl-certfile',
        action='store',
        default=None,
        type=str,
        help='a path to ssl certificate file to identify local party'
    )
    parser.addoption(
        '--ssl-ca-certfile',
        action='store',
        default=None,
        type=str,
        help='a path to a trusted certificate or a certificate chain'
    )
    parser.addoption(
        '--ssl-cert-reqs',
        action=CertReqsParser,
        default=ssl.CERT_NONE,
        help=(
            'determines how the remote side certificate is treated: '
            'NONE (ignore, default), '
            'OPTIONAL (validate, if provided) or '
            'REQUIRED (valid remote certificate is required)'
        )
    )
    parser.addoption(
        '--ssl-ciphers',
        action='store',
        default=SSL_DEFAULT_CIPHERS,
        type=str,
        help='ciphers to use'
    )
    parser.addoption(
        '--ssl-version',
        action=SSLVersionParser,
        default=SSL_DEFAULT_VERSION,
        help='SSL version: TLSV1_1 or TLSV1_2'
    )
    parser.addoption(
        '--username',
        action='store',
        type=str,
        help='user name'
    )
    parser.addoption(
        '--password',
        action='store',
        type=str,
        help='password'
    )
    parser.addoption(
        '--examples',
        action='store_true',
        help='check if examples can be run',
    )
+
+
def pytest_generate_tests(metafunc):
    """
    Inject session-wide connection parameters into any test that requests
    them as fixtures, falling back to the defaults below when the matching
    command line option was not given.
    """
    session_parameters = {
        'ignite_host': IGNITE_DEFAULT_HOST,
        'ignite_port': IGNITE_DEFAULT_PORT,
        'timeout': None,
        'use_ssl': False,
        'ssl_keyfile': None,
        'ssl_certfile': None,
        'ssl_ca_certfile': None,
        'ssl_cert_reqs': ssl.CERT_NONE,
        'ssl_ciphers': SSL_DEFAULT_CIPHERS,
        'ssl_version': SSL_DEFAULT_VERSION,
        'username': None,
        'password': None,
    }

    for param_name, default in session_parameters.items():
        if param_name not in metafunc.fixturenames:
            continue
        value = metafunc.config.getoption(param_name)
        if value is None:
            value = default
        # `append`-style options yield lists already; wrap scalars
        if type(value) is not list:
            value = [value]
        metafunc.parametrize(param_name, value, scope='session')
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_binary.py b/modules/platforms/python/tests/test_binary.py
new file mode 100644
index 0000000..f6c48e9
--- /dev/null
+++ b/modules/platforms/python/tests/test_binary.py
@@ -0,0 +1,281 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+from decimal import Decimal
+
+from pyignite import GenericObjectMeta
+from pyignite.datatypes import (
+ BinaryObject, BoolObject, IntObject, DecimalObject, LongObject, String,
+)
+from pyignite.datatypes.prop_codes import *
+
+
# Rows inserted into the test table: [pk, bool, str, int, decimal].
insert_data = [
    [1, True, 'asdf', 42, Decimal('2.4')],
    [2, False, 'zxcvb', 43, Decimal('2.5')],
    [3, True, 'qwerty', 44, Decimal('2.6')],
]

# Cursor page size used by the tests in this module.
page_size = 100

# SQL schema the test table lives in.
scheme_name = 'PUBLIC'

table_sql_name = 'AllDataType'
# Ignite exposes SQL tables as caches named 'SQL_<SCHEMA>_<TABLE>'.
table_cache_name = 'SQL_{}_{}'.format(
    scheme_name,
    table_sql_name.upper(),
)

# NOTE(fix): the original queries had a trailing comma before the closing
# parenthesis in both the column definition list and the insert column
# list, which is invalid SQL; the trailing commas are removed here.
create_query = '''
CREATE TABLE {} (
    test_pk INTEGER(11) PRIMARY KEY,
    test_bool BOOLEAN,
    test_str VARCHAR(24),
    test_int INTEGER(11),
    test_decimal DECIMAL(11, 5)
)
'''.format(table_sql_name)

insert_query = '''
INSERT INTO {} (
    test_pk, test_bool, test_str, test_int, test_decimal
) VALUES (?, ?, ?, ?, ?)'''.format(table_sql_name)

select_query = '''SELECT * FROM {}'''.format(table_sql_name)

drop_query = 'DROP TABLE {} IF EXISTS'.format(table_sql_name)
+
+
def test_sql_read_as_binary(client):
    """Create an SQL table, insert rows via SQL, then read the rows back
    through the key-value cache API as binary objects."""

    client.sql(drop_query)
    client.sql(create_query)

    for row in insert_data:
        client.sql(insert_query, query_args=row)

    cache = client.get_cache(table_cache_name)

    known_keys = {row[0] for row in insert_data}
    known_values = {tuple(row[1:]) for row in insert_data}

    # binary object fields are exposed as attributes; collect them into
    # a tuple to compare with the initial data
    for key, obj in cache.scan():
        assert key in known_keys
        fields = (obj.TEST_BOOL, obj.TEST_STR, obj.TEST_INT, obj.TEST_DECIMAL)
        assert fields in known_values

    client.sql(drop_query)
+
+
def test_sql_write_as_binary(client):
    """Configure a cache as an SQL table via query entities, write rows
    through the key-value API as binary objects, then read them back
    both as key-value pairs and with an SQL query."""

    # make sure the cache backing the SQL schema exists
    client.get_or_create_cache(scheme_name)

    # configure cache as an SQL table
    type_name = table_cache_name

    # register binary type whose fields mirror the table columns
    class AllDataType(
        metaclass=GenericObjectMeta,
        type_name=type_name,
        schema=OrderedDict([
            ('TEST_BOOL', BoolObject),
            ('TEST_STR', String),
            ('TEST_INT', IntObject),
            ('TEST_DECIMAL', DecimalObject),
        ]),
    ):
        pass

    # the query entity maps binary object fields onto SQL columns
    table_cache = client.get_or_create_cache({
        PROP_NAME: table_cache_name,
        PROP_SQL_SCHEMA: scheme_name,
        PROP_QUERY_ENTITIES: [
            {
                'table_name': table_sql_name.upper(),
                'key_field_name': 'TEST_PK',
                'key_type_name': 'java.lang.Integer',
                'field_name_aliases': [],
                'query_fields': [
                    {
                        'name': 'TEST_PK',
                        'type_name': 'java.lang.Integer',
                        'is_key_field': True,
                        'is_notnull_constraint_field': True,
                    },
                    {
                        'name': 'TEST_BOOL',
                        'type_name': 'java.lang.Boolean',
                    },
                    {
                        'name': 'TEST_STR',
                        'type_name': 'java.lang.String',
                    },
                    {
                        'name': 'TEST_INT',
                        'type_name': 'java.lang.Integer',
                    },
                    {
                        'name': 'TEST_DECIMAL',
                        'type_name': 'java.math.BigDecimal',
                        'default_value': Decimal('0.00'),
                        'precision': 11,
                        'scale': 2,
                    },
                ],
                'query_indexes': [],
                'value_type_name': type_name,
                'value_field_name': None,
            },
        ],
    })
    table_settings = table_cache.settings
    assert table_settings, 'SQL table cache settings are empty'

    # insert rows as k-v: the key is the PK, the value carries the rest
    for row in insert_data:
        value = AllDataType()
        (
            value.TEST_BOOL,
            value.TEST_STR,
            value.TEST_INT,
            value.TEST_DECIMAL,
        ) = row[1:]
        table_cache.put(row[0], value, key_hint=IntObject)

    data = table_cache.scan()
    assert len(list(data)) == len(insert_data), (
        'Not all data was read as key-value'
    )

    # read rows as SQL
    data = client.sql(select_query, include_field_names=True)

    # with include_field_names the first row is the column header
    header_row = next(data)
    for field_name in AllDataType.schema.keys():
        assert field_name in header_row, 'Not all field names in header row'

    data = list(data)
    assert len(data) == len(insert_data), 'Not all data was read as SQL rows'

    # cleanup
    table_cache.destroy()
+
+
def test_nested_binary_objects(client):
    """A binary object may hold another binary object in one of its fields
    and survive a put/get round trip intact."""

    cache = client.get_or_create_cache('nested_binary')

    class InnerType(
        metaclass=GenericObjectMeta,
        schema=OrderedDict([
            ('inner_int', LongObject),
            ('inner_str', String),
        ]),
    ):
        pass

    class OuterType(
        metaclass=GenericObjectMeta,
        schema=OrderedDict([
            ('outer_int', LongObject),
            ('nested_binary', BinaryObject),
            ('outer_str', String),
        ]),
    ):
        pass

    nested = InnerType(inner_int=42, inner_str='This is a test string')
    wrapper = OuterType(
        outer_int=43,
        nested_binary=nested,
        outer_str='This is another test string',
    )
    cache.put(1, wrapper)

    fetched = cache.get(1)
    assert fetched.outer_int == 43
    assert fetched.outer_str == 'This is another test string'
    assert fetched.nested_binary.inner_int == 42
    assert fetched.nested_binary.inner_str == 'This is a test string'

    cache.destroy()
+
+
def test_add_schema_to_binary_object(client):
    """Re-registering a binary type with a changed field set yields a new
    schema version under the same type id."""

    cache = client.create_cache('migrate_binary')

    class MyBinaryType(
        metaclass=GenericObjectMeta,
        schema=OrderedDict([
            ('test_str', String),
            ('test_int', LongObject),
            ('test_bool', BoolObject),
        ]),
    ):
        pass

    original = MyBinaryType(
        test_str='Test string',
        test_int=42,
        test_bool=True,
    )
    cache.put(1, original)

    fetched = cache.get(1)
    assert fetched.test_str == 'Test string'
    assert fetched.test_int == 42
    assert fetched.test_bool is True

    # derive a second schema: add a decimal field, drop the bool one
    next_schema = MyBinaryType.schema.copy()
    next_schema['test_decimal'] = DecimalObject
    del next_schema['test_bool']

    class MyBinaryTypeV2(
        metaclass=GenericObjectMeta,
        type_name='MyBinaryType',
        schema=next_schema,
    ):
        pass

    # same type id, different schema id
    assert MyBinaryType.type_id == MyBinaryTypeV2.type_id
    assert MyBinaryType.schema_id != MyBinaryTypeV2.schema_id

    migrated = MyBinaryTypeV2(
        test_str='Another test',
        test_int=43,
        test_decimal=Decimal('2.34'),
    )
    cache.put(2, migrated)

    fetched = cache.get(2)
    assert fetched.test_str == 'Another test'
    assert fetched.test_int == 43
    assert fetched.test_decimal == Decimal('2.34')
    # the dropped field must be absent from the new-schema object
    assert not hasattr(fetched, 'test_bool')

    cache.destroy()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_cache_class.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_cache_class.py b/modules/platforms/python/tests/test_cache_class.py
new file mode 100644
index 0000000..22865be
--- /dev/null
+++ b/modules/platforms/python/tests/test_cache_class.py
@@ -0,0 +1,221 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+from decimal import Decimal
+
+import pytest
+
+from pyignite import GenericObjectMeta
+from pyignite.datatypes import (
+ BoolObject, DecimalObject, FloatObject, IntObject, String,
+)
+from pyignite.datatypes.prop_codes import *
+from pyignite.exceptions import CacheError
+
+
def test_cache_create(client):
    """A freshly created cache reports the name it was created under."""
    cache = client.get_or_create_cache('my_oop_cache')
    assert cache.name == 'my_oop_cache'
    assert cache.settings[PROP_NAME] == 'my_oop_cache'
    cache.destroy()
+
+
def test_cache_remove(client):
    """Exercise conditional, bulk, and unconditional removal."""
    cache = client.get_or_create_cache('my_cache')
    cache.clear()
    assert cache.get_size() == 0

    # keys 'key_1'..'key_5' mapped to 1..5
    cache.put_all({'key_{}'.format(i): i for i in range(1, 6)})
    assert cache.get_size() == 5

    # value does not match: nothing is removed
    assert cache.remove_if_equals('key_1', 42) is False
    assert cache.get_size() == 5

    # value matches: the entry goes away
    assert cache.remove_if_equals('key_1', 1) is True
    assert cache.get_size() == 4

    # unknown keys ('key_1' already gone, 'key_7' never existed) are ignored
    cache.remove_keys(['key_1', 'key_3', 'key_5', 'key_7'])
    assert cache.get_size() == 2

    cache.remove_all()
    assert cache.get_size() == 0
+
+
def test_cache_get(client):
    """Reading settings of a destroyed cache raises CacheError.

    The original collected the exception into a flag variable with a manual
    try/except and asserted on its type; `pytest.raises` expresses the same
    contract idiomatically (pytest is already imported by this module).
    """
    client.get_or_create_cache('my_cache')

    my_cache = client.get_cache('my_cache')
    assert my_cache.settings[PROP_NAME] == 'my_cache'
    my_cache.destroy()

    # get_cache does not hit the server; the error surfaces only when the
    # destroyed cache's settings are actually requested
    my_cache = client.get_cache('my_cache')
    with pytest.raises(CacheError):
        _ = my_cache.settings[PROP_NAME]
+
+
def test_cache_config(client):
    """Cache properties supplied at creation are visible in settings."""
    key_config = [
        {
            'type_name': 'blah',
            'affinity_key_field_name': 'abc1234',
        },
    ]
    client.create_cache({
        PROP_NAME: 'my_oop_cache',
        PROP_CACHE_KEY_CONFIGURATION: key_config,
    })

    cache = client.get_or_create_cache('my_oop_cache')
    assert cache.name == 'my_oop_cache'
    assert cache.settings[PROP_CACHE_KEY_CONFIGURATION] == key_config

    cache.destroy()
+
+
def test_cache_get_put(client):
    """A value stored with put is read back with get.

    The original ended with ``assert result, 42`` — that only asserts
    truthiness, with 42 silently acting as the assertion *message*.
    Compare for equality instead so the test can actually fail.
    """
    cache = client.get_or_create_cache('my_oop_cache')
    cache.put('my_key', 42)
    result = cache.get('my_key')
    assert result == 42
    cache.destroy()
+
+
def test_cache_binary_get_put(client):
    """A binary object survives a put/get round trip field by field."""

    class TestBinaryType(
        metaclass=GenericObjectMeta,
        schema=OrderedDict([
            ('test_bool', BoolObject),
            ('test_str', String),
            ('test_int', IntObject),
            ('test_decimal', DecimalObject),
        ]),
    ):
        pass

    cache = client.create_cache('my_oop_cache')

    stored = TestBinaryType(
        test_bool=True,
        test_str='This is a test',
        test_int=42,
        test_decimal=Decimal('34.56'),
    )
    cache.put('my_key', stored)

    loaded = cache.get('my_key')
    assert loaded.test_bool is True
    assert loaded.test_str == 'This is a test'
    assert loaded.test_int == 42
    assert loaded.test_decimal == Decimal('34.56')

    cache.destroy()
+
+
def test_get_binary_type(client):
    """Each re-registration with a new field set adds one schema."""
    base_fields = [
        ('TEST_BOOL', BoolObject),
        ('TEST_STR', String),
        ('TEST_INT', IntObject),
    ]
    extra_fields = (
        [],
        [('TEST_FLOAT', FloatObject)],
        [('TEST_DECIMAL', DecimalObject)],
    )
    # register the same type three times with three distinct field sets
    for extra in extra_fields:
        client.put_binary_type(
            'TestBinaryType',
            schema=OrderedDict(base_fields + extra),
        )

    info = client.get_binary_type('TestBinaryType')
    assert len(info['schemas']) == 3

    # an unknown type yields only the existence flag
    info = client.get_binary_type('NonExistentType')
    assert info['type_exists'] is False
    assert len(info) == 1
+
+
@pytest.mark.parametrize('page_size', range(1, 17, 5))
def test_cache_scan(client, page_size):
    """Scan yields every entry exactly once for any cursor page size."""
    test_data = {
        1: 'This is a test',
        2: 'One more test',
        3: 'Foo',
        4: 'Buzz',
        5: 'Bar',
        6: 'Lorem ipsum',
        7: 'dolor sit amet',
        8: 'consectetur adipiscing elit',
        9: 'Nullam aliquet',
        10: 'nisl at ante',
        11: 'suscipit',
        12: 'ut cursus',
        13: 'metus interdum',
        14: 'Nulla tincidunt',
        15: 'sollicitudin iaculis',
    }

    cache = client.get_or_create_cache('my_oop_cache')
    cache.put_all(test_data)

    received = list(cache.scan(page_size=page_size))
    for key, value in received:
        assert key in test_data
        assert value in test_data.values()
    # nothing lost, nothing duplicated
    assert len(received) == len(test_data)

    cache.destroy()
+
+
def test_get_and_put_if_absent(client):
    """get_and_put_if_absent stores the value only for a missing key and
    returns the pre-existing value otherwise.

    The original ended with ``assert value is 43`` — an identity check on
    an int that only passed thanks to CPython's small-integer cache (and
    is a SyntaxWarning on modern Pythons). Compare by equality.
    """
    cache = client.get_or_create_cache('my_oop_cache')

    # key absent: 42 is stored, None is returned
    value = cache.get_and_put_if_absent('my_key', 42)
    assert value is None

    # key present: the current value is returned, 42 is not stored
    cache.put('my_key', 43)
    value = cache.get_and_put_if_absent('my_key', 42)
    assert value == 43
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_cache_class_sql.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_cache_class_sql.py b/modules/platforms/python/tests/test_cache_class_sql.py
new file mode 100644
index 0000000..5f72b39
--- /dev/null
+++ b/modules/platforms/python/tests/test_cache_class_sql.py
@@ -0,0 +1,103 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+
# (first name, last name, grade) rows, keyed 1..5 on insertion.
initial_data = [
    ('John', 'Doe', 5),
    ('Jane', 'Roe', 4),
    ('Joe', 'Bloggs', 4),
    ('Richard', 'Public', 3),
    ('Negidius', 'Numerius', 3),
]

# DDL/DML statements used by the tests below.
create_query = '''CREATE TABLE Student (
    id INT(11) PRIMARY KEY,
    first_name CHAR(24),
    last_name CHAR(32),
    grade INT(11))'''

insert_query = '''INSERT INTO Student(id, first_name, last_name, grade)
VALUES (?, ?, ?, ?)'''

select_query = 'SELECT id, first_name, last_name, grade FROM Student'

drop_query = 'DROP TABLE Student IF EXISTS'
+
+
@pytest.mark.parametrize('page_size', range(1, 6, 2))
def test_sql_fields(client, page_size):
    """An SQL fields query returns a header row and then all data rows."""

    client.sql(drop_query, page_size)

    # DDL statements report 0 affected rows
    assert next(client.sql(create_query, page_size))[0] == 0

    for row_id, (fname, lname, grade) in enumerate(initial_data, start=1):
        cursor = client.sql(
            insert_query,
            page_size,
            query_args=[row_id, fname, lname, grade],
        )
        # each INSERT affects exactly one row
        assert next(cursor)[0] == 1

    cursor = client.sql(
        select_query,
        page_size,
        include_field_names=True,
    )
    # the first row of the cursor is the column header
    assert set(next(cursor)) == {'ID', 'FIRST_NAME', 'LAST_NAME', 'GRADE'}

    rows = list(cursor)
    assert len(rows) == 5
    assert all(len(row) == 4 for row in rows)

    client.sql(drop_query, page_size)
+
+
@pytest.mark.parametrize('page_size', range(1, 6, 2))
def test_sql(client, page_size):
    """Rows inserted via SQL are readable through the table's cache."""

    client.sql(drop_query, page_size)
    assert next(client.sql(create_query, page_size))[0] == 0

    for row_id, (fname, lname, grade) in enumerate(initial_data, start=1):
        cursor = client.sql(
            insert_query,
            page_size,
            query_args=[row_id, fname, lname, grade],
        )
        assert next(cursor)[0] == 1

    known_first_names = {row[0] for row in initial_data}

    student = client.get_or_create_cache('SQL_PUBLIC_STUDENT')
    # 'TRUE' selects every row of the table
    for key, row in student.select_row('TRUE', page_size):
        assert key in range(1, 6)
        assert row.FIRST_NAME in known_first_names

    client.sql(drop_query, page_size)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_cache_config.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_cache_config.py b/modules/platforms/python/tests/test_cache_config.py
new file mode 100644
index 0000000..2f01618
--- /dev/null
+++ b/modules/platforms/python/tests/test_cache_config.py
@@ -0,0 +1,75 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.api import *
+from pyignite.datatypes.prop_codes import *
+
+
def test_get_configuration(client):
    """A created cache's configuration can be queried back by name."""
    assert cache_get_or_create(client, 'my_unique_cache').status == 0

    result = cache_get_configuration(client, 'my_unique_cache')
    assert result.status == 0
    assert result.value[PROP_NAME] == 'my_unique_cache'
+
+
def test_create_with_config(client):
    """Strict creation succeeds once and fails on a duplicate name."""

    cache_name = 'my_very_unique_name'
    config = {
        PROP_NAME: cache_name,
        PROP_CACHE_KEY_CONFIGURATION: [
            {
                'type_name': 'blah',
                'affinity_key_field_name': 'abc1234',
            }
        ],
    }

    assert cache_create_with_config(client, config).status == 0
    assert cache_name in cache_get_names(client).value

    # a second strict create with the same name must be rejected
    retry = cache_create_with_config(client, {PROP_NAME: cache_name})
    assert retry.status != 0
+
+
def test_get_or_create_with_config(client):
    """Lenient creation succeeds even when the cache already exists."""

    cache_name = 'my_very_unique_name'
    config = {
        PROP_NAME: cache_name,
        PROP_CACHE_KEY_CONFIGURATION: [
            {
                'type_name': 'blah',
                'affinity_key_field_name': 'abc1234',
            }
        ],
    }

    assert cache_get_or_create_with_config(client, config).status == 0
    assert cache_name in cache_get_names(client).value

    # get-or-create with an existing name is not an error
    retry = cache_get_or_create_with_config(client, {PROP_NAME: cache_name})
    assert retry.status == 0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_datatypes.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_datatypes.py b/modules/platforms/python/tests/test_datatypes.py
new file mode 100644
index 0000000..d7c7977
--- /dev/null
+++ b/modules/platforms/python/tests/test_datatypes.py
@@ -0,0 +1,134 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from datetime import datetime, timedelta
+import decimal
+import pytest
+import uuid
+
+from pyignite.api.key_value import cache_get, cache_put
+from pyignite.datatypes import *
+
+
@pytest.mark.parametrize(
    'value, value_hint',
    [
        # integers
        (42, None),
        (42, ByteObject),
        (42, ShortObject),
        (42, IntObject),

        # floats
        (3.1415, None),  # True for Double but not Float
        (3.5, FloatObject),

        # char is never autodetected
        ('ы', CharObject),
        ('カ', CharObject),

        # bool
        (True, None),
        (False, None),
        (True, BoolObject),
        (False, BoolObject),

        # arrays of integers
        ([1, 2, 3, 5], None),
        ([1, 2, 3, 5], ByteArrayObject),
        ([1, 2, 3, 5], ShortArrayObject),
        ([1, 2, 3, 5], IntArrayObject),

        # arrays of floats
        ([2.2, 4.4, 6.6], None),
        ([2.5, 6.5], FloatArrayObject),

        # array of char
        (['ы', 'カ'], CharArrayObject),

        # array of bool
        ([True, False, True], None),

        # string
        ('Little Mary had a lamb', None),
        ('This is a test', String),

        # decimals
        (decimal.Decimal('2.5'), None),
        (decimal.Decimal('-1.3'), None),

        # uuid
        (uuid.uuid4(), None),

        # date
        (datetime(year=1998, month=4, day=6, hour=18, minute=30), None),

        # no autodetection for timestamp either
        (
            (datetime(year=1998, month=4, day=6, hour=18, minute=30), 1000),
            TimestampObject
        ),

        # time
        (timedelta(days=4, hours=4, minutes=24), None),

        # enum is useless in Python, except for interoperability with Java.
        # Also no autodetection
        ((5, 6), BinaryEnumObject),

        # arrays of standard types
        (['String 1', 'String 2'], None),
        (['Some of us are empty', None, 'But not the others'], None),

        ([decimal.Decimal('2.71828'), decimal.Decimal('100')], None),
        ([decimal.Decimal('2.1'), None, decimal.Decimal('3.1415')], None),

        ([uuid.uuid4(), uuid.uuid4()], None),
        (
            [
                datetime(year=2010, month=1, day=1),
                datetime(year=2010, month=12, day=31),
            ],
            None,
        ),
        ([timedelta(minutes=30), timedelta(hours=2)], None),
        (
            [
                (datetime(year=2010, month=1, day=1), 1000),
                (datetime(year=2010, month=12, day=31), 200),
            ],
            TimestampArrayObject
        ),
        ((-1, [(6001, 1), (6002, 2), (6003, 3)]), BinaryEnumArrayObject),

        # object array
        ((-1, [1, 2, decimal.Decimal('3')]), None),

        # collection
        ((3, [1, 2, 3]), CollectionObject),

        # map
        ((1, {'key': 4, 5: 6.0}), None),
        ((2, {'key': 4, 5: 6.0}), None),
    ]
)
def test_put_get_data(client, cache, value, value_hint):
    """Every supported data type survives a put/get round trip.

    `value_hint` forces a specific wire type; `None` means the type is
    autodetected from the Python value (the comments above mark values
    for which autodetection is not available).
    """
    result = cache_put(client, cache, 'my_key', value, value_hint=value_hint)
    assert result.status == 0

    result = cache_get(client, cache, 'my_key')
    assert result.status == 0
    assert result.value == value
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_examples.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_examples.py b/modules/platforms/python/tests/test_examples.py
new file mode 100644
index 0000000..4665d8c
--- /dev/null
+++ b/modules/platforms/python/tests/test_examples.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import glob
+import subprocess
+import sys
+
+import pytest
+
+
+SKIP_LIST = [
+ 'failover.py', # it hangs by design
+]
+
+
def run_subprocess_34(script: str) -> int:
    """Run an example script on Python < 3.5; return its exit code.

    `sys.executable` replaces the original hard-coded ``'python'`` so the
    examples run under the same interpreter as the test suite (a bare
    ``python`` may be absent or point at a different version). ``sys`` is
    already imported at the top of this module.
    """
    return subprocess.call([
        sys.executable,
        '../examples/{}'.format(script),
    ])
+
+
def run_subprocess_35(script: str) -> int:
    """Run an example script on Python >= 3.5; return its exit code.

    `sys.executable` replaces the original hard-coded ``'python'`` so the
    examples run under the same interpreter as the test suite. ``sys`` is
    already imported at the top of this module.
    """
    return subprocess.run([
        sys.executable,
        '../examples/{}'.format(script),
    ]).returncode
+
+
@pytest.mark.skipif(
    condition=not pytest.config.option.examples,
    reason=(
        'If you wish to test examples, invoke pytest with '
        '`--examples` option.'
    ),
)
def test_examples():
    """Run every example script and expect a zero exit code."""
    # `subprocess` module was refactored in Python 3.5; pick the right
    # runner once, outside the loop
    runner = (
        run_subprocess_35 if sys.version_info >= (3, 5)
        else run_subprocess_34
    )
    for script in glob.glob1('../examples', '*.py'):
        if script in SKIP_LIST:
            continue
        assert runner(script) == 0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_generic_object.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_generic_object.py b/modules/platforms/python/tests/test_generic_object.py
new file mode 100644
index 0000000..73dc870
--- /dev/null
+++ b/modules/platforms/python/tests/test_generic_object.py
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite import GenericObjectMeta
+from pyignite.datatypes import *
+
+
def test_go():
    """A class can be declared via GenericObjectMeta with an explicit
    schema and version, and then instantiated.

    The original test only printed the class name and asserted nothing;
    an actual assertion is added so instantiation failures are caught.
    """

    class GenericObject(
        metaclass=GenericObjectMeta,
        schema={
            'TEST_ID': IntObject,
            'TEST_NAME': String,
        },
        version=2,
    ):
        pass

    x = GenericObject()
    assert type(x) is GenericObject
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_get_names.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_get_names.py b/modules/platforms/python/tests/test_get_names.py
new file mode 100644
index 0000000..0e50f3d
--- /dev/null
+++ b/modules/platforms/python/tests/test_get_names.py
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.api import cache_create, cache_get_names
+
+
def test_get_names(client):
    """Every created cache shows up in the cache names listing.

    Two small cleanups against the original: `isinstance` instead of an
    exact `type(...) == list` comparison, and the unused `enumerate`
    index is dropped from the membership loop.
    """
    bucket_names = ['my_bucket', 'my_bucket_2', 'my_bucket_3']
    for name in bucket_names:
        cache_create(client, name)

    result = cache_get_names(client)
    assert result.status == 0
    assert isinstance(result.value, list)
    # other tests may have created caches too, hence >= not ==
    assert len(result.value) >= len(bucket_names)
    for name in bucket_names:
        assert name in result.value
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_handshake.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_handshake.py b/modules/platforms/python/tests/test_handshake.py
new file mode 100644
index 0000000..54315f0
--- /dev/null
+++ b/modules/platforms/python/tests/test_handshake.py
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import socket
+
+from pyignite import Client
+from pyignite.connection.handshake import HandshakeRequest, read_response
+
+
def test_handshake(
    monkeypatch,
    ignite_host, ignite_port, use_ssl, ssl_keyfile, ssl_certfile,
    ssl_ca_certfile, ssl_cert_reqs, ssl_ciphers, ssl_version,
    username, password,
):
    """Perform the low-level binary protocol handshake by hand: once with
    the real protocol version (must be accepted), once with a bogus one
    (must be rejected)."""
    client = Client(
        use_ssl=use_ssl,
        ssl_keyfile=ssl_keyfile,
        ssl_certfile=ssl_certfile,
        ssl_ca_certfile=ssl_ca_certfile,
        ssl_cert_reqs=ssl_cert_reqs,
        ssl_ciphers=ssl_ciphers,
        ssl_version=ssl_version,
        username=username,
        password=password,
    )
    # build the (possibly SSL-wrapped) socket manually instead of using
    # the client's own connect machinery
    client._socket = client._wrap(
        socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    )
    client.socket.connect((ignite_host, ignite_port))
    hs_request = HandshakeRequest(username, password)
    client.send(hs_request)
    hs_response = read_response(client)
    # non-zero op_code means the server accepted the handshake
    assert hs_response['op_code'] != 0

    client.close()

    # intentionally pass wrong protocol version
    from pyignite.connection import handshake
    monkeypatch.setattr(handshake, 'PROTOCOL_VERSION_MAJOR', 10)

    client._socket = client._wrap(
        socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    )
    client.socket.connect((ignite_host, ignite_port))
    hs_request = HandshakeRequest(username, password)
    client.send(hs_request)
    hs_response = read_response(client)
    # op_code 0 marks a handshake failure response
    assert hs_response['op_code'] == 0

    client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_key_value.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_key_value.py b/modules/platforms/python/tests/test_key_value.py
new file mode 100644
index 0000000..c569c77
--- /dev/null
+++ b/modules/platforms/python/tests/test_key_value.py
@@ -0,0 +1,327 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.api import *
+from pyignite.datatypes import IntObject
+
+
def test_put_get(client, cache):
    """A value stored under a key can be read back unchanged."""
    assert cache_put(client, cache, 'my_key', 5).status == 0

    fetched = cache_get(client, cache, 'my_key')
    assert fetched.status == 0
    assert fetched.value == 5
+
+
def test_get_all(client, cache):
    """Bulk read returns only the keys that are actually stored."""
    keys = ['key_1', 2, (3, IntObject)]

    # nothing stored yet: empty mapping comes back
    before = cache_get_all(client, cache, keys)
    assert before.status == 0
    assert before.value == {}

    cache_put(client, cache, 'key_1', 4)
    cache_put(client, cache, 3, 18, key_hint=IntObject)

    after = cache_get_all(client, cache, keys)
    assert after.status == 0
    assert after.value == {'key_1': 4, 3: 18}
+
+
def test_put_all(client, cache):
    """Bulk write stores every entry and bulk read returns them all.

    Fix: the original asserted ``len(test_dict) == 3`` — a tautology about
    the local literal — instead of checking the size of the server response.
    """
    test_dict = {
        1: 2,
        'key_1': 4,
        (3, IntObject): 18,
    }
    test_keys = ['key_1', 1, 3]

    result = cache_put_all(client, cache, test_dict)
    assert result.status == 0

    result = cache_get_all(client, cache, test_keys)
    assert result.status == 0
    # verify the server returned every entry we stored
    assert len(result.value) == len(test_dict)

    for key in result.value:
        assert key in test_keys
+
+
def test_contains_key(client, cache):
    """Presence check is True for stored keys and False otherwise."""
    cache_put(client, cache, 'test_key', 42)

    assert cache_contains_key(client, cache, 'test_key').value is True
    assert cache_contains_key(client, cache, 'non-existant-key').value is False
+
+
def test_contains_keys(client, cache):
    """The all-keys presence check requires every listed key to exist."""
    cache_put(client, cache, 5, 6)
    cache_put(client, cache, 'test_key', 42)

    assert cache_contains_keys(client, cache, [5, 'test_key']).value is True
    # one missing key makes the whole check fail
    assert cache_contains_keys(
        client, cache, [5, 'non-existent-key']
    ).value is False
+
+
def test_get_and_put(client, cache):
    """Get-and-put returns the previous value (None on first write).

    Fix: the original used ``result.value is 42`` — identity comparison
    with an int literal, which only passes thanks to CPython small-int
    interning and raises a SyntaxWarning on modern interpreters. Value
    equality is what the test means.
    """
    result = cache_get_and_put(client, cache, 'test_key', 42)
    assert result.status == 0
    assert result.value is None

    result = cache_get(client, cache, 'test_key')
    assert result.status == 0
    # compare by value, not identity
    assert result.value == 42

    result = cache_get_and_put(client, cache, 'test_key', 1234)
    assert result.status == 0
    assert result.value == 42
+
+
def test_get_and_replace(client, cache):
    """Get-and-replace only replaces (and returns) an existing value."""
    replaced = cache_get_and_replace(client, cache, 'test_key', 42)
    assert replaced.status == 0
    assert replaced.value is None

    # a missing key must not be created by get-and-replace
    fetched = cache_get(client, cache, 'test_key')
    assert fetched.status == 0
    assert fetched.value is None

    cache_put(client, cache, 'test_key', 42)

    replaced = cache_get_and_replace(client, cache, 'test_key', 1234)
    assert replaced.status == 0
    assert replaced.value == 42
+
+
def test_get_and_remove(client, cache):
    """Get-and-remove yields the stored value, or None when absent."""
    removed = cache_get_and_remove(client, cache, 'test_key')
    assert removed.status == 0
    assert removed.value is None

    cache_put(client, cache, 'test_key', 42)

    removed = cache_get_and_remove(client, cache, 'test_key')
    assert removed.status == 0
    assert removed.value == 42
+
+
def test_put_if_absent(client, cache):
    """Put-if-absent succeeds once, then refuses to overwrite."""
    first = cache_put_if_absent(client, cache, 'test_key', 42)
    assert first.status == 0
    assert first.value is True

    second = cache_put_if_absent(client, cache, 'test_key', 1234)
    assert second.status == 0
    assert second.value is False
+
+
def test_get_and_put_if_absent(client, cache):
    """Get-and-put-if-absent stores once, then keeps the first value."""
    # (value to write, previous value expected back)
    scenarios = (
        (42, None),    # first write: nothing stored yet
        (1234, 42),    # subsequent writes are ignored…
        (5678, 42),    # …and the original value keeps coming back
    )
    for new_value, expected in scenarios:
        result = cache_get_and_put_if_absent(
            client, cache, 'test_key', new_value)
        assert result.status == 0
        assert result.value == expected
+
+
def test_replace(client, cache):
    """Replace affects existing keys only."""
    replaced = cache_replace(client, cache, 'test_key', 42)
    assert replaced.status == 0
    assert replaced.value is False  # nothing to replace yet

    cache_put(client, cache, 'test_key', 1234)

    replaced = cache_replace(client, cache, 'test_key', 42)
    assert replaced.status == 0
    assert replaced.value is True

    fetched = cache_get(client, cache, 'test_key')
    assert fetched.status == 0
    assert fetched.value == 42
+
+
def test_replace_if_equals(client, cache):
    """Conditional replace fires only when the stored value matches."""
    attempt = cache_replace_if_equals(client, cache, 'my_test', 42, 1234)
    assert attempt.status == 0
    assert attempt.value is False  # key does not exist yet

    cache_put(client, cache, 'my_test', 42)

    attempt = cache_replace_if_equals(client, cache, 'my_test', 42, 1234)
    assert attempt.status == 0
    assert attempt.value is True

    fetched = cache_get(client, cache, 'my_test')
    assert fetched.status == 0
    assert fetched.value == 1234
+
+
def test_clear(client, cache):
    """Clearing the cache removes all entries."""
    assert cache_put(client, cache, 'my_test', 42).status == 0
    assert cache_clear(client, cache).status == 0

    fetched = cache_get(client, cache, 'my_test')
    assert fetched.status == 0
    assert fetched.value is None
+
+
def test_clear_key(client, cache):
    """Clearing a single key leaves other entries intact."""
    assert cache_put(client, cache, 'my_test', 42).status == 0
    assert cache_put(client, cache, 'another_test', 24).status == 0

    assert cache_clear_key(client, cache, 'my_test').status == 0

    cleared = cache_get(client, cache, 'my_test')
    assert cleared.status == 0
    assert cleared.value is None

    kept = cache_get(client, cache, 'another_test')
    assert kept.status == 0
    assert kept.value == 24
+
+
def test_clear_keys(client, cache):
    """Clearing a key list tolerates missing keys and spares the rest."""
    assert cache_put(client, cache, 'my_test_key', 42).status == 0
    assert cache_put(client, cache, 'another_test', 24).status == 0

    # unknown keys in the list are simply ignored
    assert cache_clear_keys(
        client, cache, ['my_test_key', 'nonexistent_key'],
    ).status == 0

    cleared = cache_get(client, cache, 'my_test_key')
    assert cleared.status == 0
    assert cleared.value is None

    kept = cache_get(client, cache, 'another_test')
    assert kept.status == 0
    assert kept.value == 24
+
+
def test_remove_key(client, cache):
    """Removal reports True for stored keys, False for missing ones."""
    assert cache_put(client, cache, 'my_test_key', 42).status == 0

    removed = cache_remove_key(client, cache, 'my_test_key')
    assert removed.status == 0
    assert removed.value is True

    removed = cache_remove_key(client, cache, 'non_existent_key')
    assert removed.status == 0
    assert removed.value is False
+
+
def test_remove_if_equals(client, cache):
    """Conditional removal fires only when the stored value matches."""
    assert cache_put(client, cache, 'my_test', 42).status == 0

    attempt = cache_remove_if_equals(client, cache, 'my_test', 1234)
    assert attempt.status == 0
    assert attempt.value is False  # value mismatch: entry survives

    attempt = cache_remove_if_equals(client, cache, 'my_test', 42)
    assert attempt.status == 0
    assert attempt.value is True

    fetched = cache_get(client, cache, 'my_test')
    assert fetched.status == 0
    assert fetched.value is None
+
+
def test_remove_keys(client, cache):
    """Removing a key list tolerates missing keys and spares the rest."""
    assert cache_put(client, cache, 'my_test', 42).status == 0
    assert cache_put(client, cache, 'another_test', 24).status == 0

    assert cache_remove_keys(
        client, cache, ['my_test', 'non_existent'],
    ).status == 0

    removed = cache_get(client, cache, 'my_test')
    assert removed.status == 0
    assert removed.value is None

    kept = cache_get(client, cache, 'another_test')
    assert kept.status == 0
    assert kept.value == 24
+
+
def test_remove_all(client, cache):
    """Remove-all deletes every entry in the cache."""
    assert cache_put(client, cache, 'my_test', 42).status == 0
    assert cache_put(client, cache, 'another_test', 24).status == 0

    assert cache_remove_all(client, cache).status == 0

    # both previously stored keys are now gone
    for key in ('my_test', 'another_test'):
        fetched = cache_get(client, cache, key)
        assert fetched.status == 0
        assert fetched.value is None
+
+
def test_cache_get_size(client, cache):
    """Cache size reflects the number of stored entries."""
    assert cache_put(client, cache, 'my_test', 42).status == 0

    size = cache_get_size(client, cache)
    assert size.status == 0
    assert size.value == 1
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_scan.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_scan.py b/modules/platforms/python/tests/test_scan.py
new file mode 100644
index 0000000..77e9613
--- /dev/null
+++ b/modules/platforms/python/tests/test_scan.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.api import (
+ scan, scan_cursor_get_page, resource_close, cache_put_all,
+)
+
+
def test_scan(client, cache):
    """Scan query pages through the cache; an exhausted cursor errors."""
    page_size = 10

    # preload exactly two pages worth of entries
    fixture = {'key_{}'.format(v): v for v in range(page_size * 2)}
    assert cache_put_all(client, cache, fixture).status == 0

    first_page = scan(client, cache, page_size)
    assert first_page.status == 0
    assert len(first_page.value['data']) == page_size
    assert first_page.value['more'] is True

    cursor = first_page.value['cursor']

    second_page = scan_cursor_get_page(client, cursor)
    assert second_page.status == 0
    assert len(second_page.value['data']) == page_size
    assert second_page.value['more'] is False

    # cursor is exhausted (and closed server-side) by now
    assert scan_cursor_get_page(client, cursor).status != 0
+
+
def test_close_resource(client, cache):
    """A closed cursor can no longer be paged."""
    page_size = 10

    # preload two pages so the cursor stays open after the first read
    fixture = {'key_{}'.format(v): v for v in range(page_size * 2)}
    assert cache_put_all(client, cache, fixture).status == 0

    first_page = scan(client, cache, page_size)
    assert first_page.status == 0
    assert len(first_page.value['data']) == page_size
    assert first_page.value['more'] is True

    cursor = first_page.value['cursor']

    assert resource_close(client, cursor).status == 0

    # the server must reject requests on the released cursor
    assert scan_cursor_get_page(client, cursor).status != 0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/tests/test_sql.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/tests/test_sql.py b/modules/platforms/python/tests/test_sql.py
new file mode 100644
index 0000000..d3c5f84
--- /dev/null
+++ b/modules/platforms/python/tests/test_sql.py
@@ -0,0 +1,154 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.api import (
+ sql_fields, sql_fields_cursor_get_page,
+ cache_get_or_create, sql, sql_cursor_get_page,
+ cache_get_configuration,
+)
+from pyignite.datatypes.prop_codes import *
+from pyignite.utils import entity_id, unwrap_binary
+
# Sample rows for the Student table: (first_name, last_name, grade).
initial_data = [
    ('John', 'Doe', 5),
    ('Jane', 'Roe', 4),
    ('Joe', 'Bloggs', 4),
    ('Richard', 'Public', 3),
    ('Negidius', 'Numerius', 3),
]

# DDL used by both tests; `id` doubles as the cache key column.
create_query = '''CREATE TABLE Student (
    id INT(11) PRIMARY KEY,
    first_name CHAR(24),
    last_name CHAR(32),
    grade INT(11))'''

# Parameterized insert: args are bound positionally via query_args.
insert_query = '''INSERT INTO Student(id, first_name, last_name, grade)
VALUES (?, ?, ?, ?)'''

select_query = 'SELECT id, first_name, last_name, grade FROM Student'

# Idempotent cleanup statement (safe when the table does not exist).
drop_query = 'DROP TABLE Student IF EXISTS'

# Deliberately smaller than len(initial_data) so cursor paging is exercised.
page_size = 4
+
+
def test_sql(client):
    """Run a row-oriented SQL query over a table created via SQL DDL."""
    # cleanup
    client.sql(drop_query)

    result = sql_fields(
        client, 'PUBLIC', create_query, page_size,
        include_field_names=True,
    )
    assert result.status == 0, result.message

    for row_id, (fname, lname, grade) in enumerate(initial_data, start=1):
        result = sql_fields(
            client, 'PUBLIC', insert_query, page_size,
            query_args=[row_id, fname, lname, grade],
            include_field_names=True,
        )
        assert result.status == 0, result.message

    result = cache_get_configuration(client, 'SQL_PUBLIC_STUDENT')
    assert result.status == 0, result.message

    # the value type name the server generated for the SQL table
    binary_type_name = result.value[PROP_QUERY_ENTITIES][0]['value_type_name']

    result = sql(
        client, 'SQL_PUBLIC_STUDENT', binary_type_name, 'TRUE', page_size,
    )
    assert result.status == 0, result.message
    assert len(result.value['data']) == page_size
    assert result.value['more'] is True

    def check_page(page):
        # every value must unwrap to an object of the table's binary type
        for wrapped_object in page['data'].values():
            data = unwrap_binary(client, wrapped_object)
            assert data.type_id == entity_id(binary_type_name)

    check_page(result.value)

    cursor = result.value['cursor']
    while result.value['more']:
        result = sql_cursor_get_page(client, cursor)
        assert result.status == 0, result.message
        check_page(result.value)

    # repeat cleanup
    result = sql_fields(client, 'PUBLIC', drop_query, page_size)
    assert result.status == 0
+
+
def test_sql_fields(client):
    """Run a fields (column-oriented) SQL query with cursor paging."""
    # cleanup
    client.sql(drop_query)

    result = sql_fields(
        client, 'PUBLIC', create_query, page_size,
        include_field_names=True,
    )
    assert result.status == 0, result.message

    for row_id, (fname, lname, grade) in enumerate(initial_data, start=1):
        result = sql_fields(
            client, 'PUBLIC', insert_query, page_size,
            query_args=[row_id, fname, lname, grade],
            include_field_names=True,
        )
        assert result.status == 0, result.message

    result = sql_fields(
        client, 'PUBLIC', select_query, page_size,
        include_field_names=True,
    )
    assert result.status == 0
    # the first page is full, with more rows left on the server
    assert len(result.value['data']) == page_size
    assert result.value['more'] is True

    cursor = result.value['cursor']

    result = sql_fields_cursor_get_page(client, cursor, field_count=4)
    assert result.status == 0
    # the second page holds the remainder and closes the cursor
    assert len(result.value['data']) == len(initial_data) - page_size
    assert result.value['more'] is False

    # repeat cleanup
    result = sql_fields(client, 'PUBLIC', drop_query, page_size)
    assert result.status == 0
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/parent/pom.xml
----------------------------------------------------------------------
diff --git a/parent/pom.xml b/parent/pom.xml
index 23c7b56..b22905a 100644
--- a/parent/pom.xml
+++ b/parent/pom.xml
@@ -939,6 +939,11 @@
<exclude>modules/platforms/nodejs/examples/certs/ca.crt</exclude>
<exclude>modules/platforms/nodejs/examples/certs/client.key</exclude>
<exclude>modules/platforms/nodejs/api_spec/**</exclude>
+ <exclude>modules/platforms/python/**/*.md</exclude><!--markdown can not be commented-->
+ <exclude>modules/platforms/python/docs/Makefile</exclude><!--sphinx files: BSD license-->
+ <exclude>modules/platforms/python/docs/conf.py</exclude><!--sphinx files: BSD license-->
+ <exclude>modules/platforms/python/docs/source/**/*.rst</exclude><!--sphinx-apidoc generated files-->
+ <exclude>modules/platforms/python/requirements/**/*.txt</exclude><!--plain text can not be commented-->
<!--web console-->
<exclude>**/web-console/**/.eslintrc</exclude>
<exclude>**/web-console/**/.babelrc</exclude>
[4/6] ignite git commit: IGNITE-7782 Python thin client
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/cache_config.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/cache_config.py b/modules/platforms/python/pyignite/api/cache_config.py
new file mode 100644
index 0000000..cfea416
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/cache_config.py
@@ -0,0 +1,279 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Set of functions to manipulate caches.
+
+Ignite `cache` can be viewed as a named entity designed to store key-value
+pairs. Each cache is split transparently between different Ignite partitions.
+
+The choice of `cache` term is due to historical reasons. (Ignite initially had
+only non-persistent storage tier.)
+"""
+
+from typing import Union
+
+from pyignite.datatypes.cache_config import cache_config_struct
+from pyignite.datatypes.cache_properties import prop_map
+from pyignite.datatypes import (
+ Int, Byte, prop_codes, Short, String, StringArray,
+)
+from pyignite.queries import Query, ConfigQuery
+from pyignite.queries.op_codes import *
+from pyignite.utils import cache_id
+
+
def compact_cache_config(cache_config: dict) -> dict:
    """
    This is to make cache config read/write-symmetrical.

    :param cache_config: dict of cache config properties,
     like {'is_onheapcache_enabled': 1},
    :return: the same dict, but with property codes as keys,
     like {PROP_IS_ONHEAPCACHE_ENABLED: 1}.
    """
    # map each property name to its PROP_* code; 'length' is wire-format
    # bookkeeping, not a real property, and is dropped
    return {
        getattr(prop_codes, 'PROP_{}'.format(name.upper())): value
        for name, value in cache_config.items()
        if name != 'length'
    }
+
+
def cache_get_configuration(
    connection: 'Connection', cache: Union[str, int], flags: int=0, query_id=None,
) -> 'APIResult':
    """
    Gets configuration for the given cache.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param flags: Ignite documentation is unclear on this subject,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Result value is OrderedDict with
     the cache configuration parameters.
    """
    query_struct = Query(
        OP_CACHE_GET_CONFIGURATION,
        [('hash_code', Int), ('flags', Byte)],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={'hash_code': cache_id(cache), 'flags': flags},
        response_config=[('cache_config', cache_config_struct)],
    )
    # on success, flatten the response to {property code: value}
    if result.status == 0:
        result.value = compact_cache_config(result.value['cache_config'])
    return result
+
+
def cache_create(
    connection: 'Connection', name: str, query_id=None,
) -> 'APIResult':
    """
    Creates a cache with a given name. Returns error if a cache with specified
    name already exists.

    :param connection: connection to Ignite server,
    :param name: cache name,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status if a cache is
     created successfully, non-zero status and an error description otherwise.
    """
    return Query(
        OP_CACHE_CREATE_WITH_NAME,
        [('cache_name', String)],
        query_id=query_id,
    ).perform(connection, query_params={'cache_name': name})
+
+
def cache_get_or_create(
    connection: 'Connection', name: str, query_id=None,
) -> 'APIResult':
    """
    Creates a cache with a given name. Does nothing if the cache exists.

    :param connection: connection to Ignite server,
    :param name: cache name,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status if a cache is
     created successfully, non-zero status and an error description otherwise.
    """
    return Query(
        OP_CACHE_GET_OR_CREATE_WITH_NAME,
        [('cache_name', String)],
        query_id=query_id,
    ).perform(connection, query_params={'cache_name': name})
+
+
def cache_destroy(
    connection: 'Connection', cache: Union[str, int], query_id=None,
) -> 'APIResult':
    """
    Destroys cache with a given name.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """
    return Query(
        OP_CACHE_DESTROY,
        [('hash_code', Int)],
        query_id=query_id,
    ).perform(connection, query_params={'hash_code': cache_id(cache)})
+
+
def cache_get_names(connection: 'Connection', query_id=None) -> 'APIResult':
    """
    Gets existing cache names.

    :param connection: connection to Ignite server,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status and a list of cache
     names, non-zero status and an error description otherwise.
    """
    result = Query(OP_CACHE_GET_NAMES, query_id=query_id).perform(
        connection,
        response_config=[('cache_names', StringArray)],
    )
    # on success, expose the bare name list instead of the wrapping dict
    if result.status == 0:
        result.value = result.value['cache_names']
    return result
+
+
def cache_create_with_config(
    connection: 'Connection', cache_props: dict, query_id=None,
) -> 'APIResult':
    """
    Creates cache with provided configuration. An error is returned
    if the name is already in use.

    :param connection: connection to Ignite server,
    :param cache_props: cache configuration properties to create cache with
     in form of dictionary {property code: python value}.
     You must supply at least name (PROP_NAME),
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status if cache was created,
     non-zero status and an error description otherwise.
    """
    # assign each property a positional field name and look up its codec
    prop_types = {}
    prop_values = {'param_count': len(cache_props)}
    for index, (prop_code, prop_value) in enumerate(cache_props.items()):
        field = 'property_{}'.format(index)
        prop_types[field] = prop_map(prop_code)
        prop_values[field] = prop_value

    query_struct = ConfigQuery(
        OP_CACHE_CREATE_WITH_CONFIGURATION,
        [('param_count', Short)] + list(prop_types.items()),
        query_id=query_id,
    )
    return query_struct.perform(connection, query_params=prop_values)
+
+
def cache_get_or_create_with_config(
    connection: 'Connection', cache_props: dict, query_id=None,
) -> 'APIResult':
    """
    Creates cache with provided configuration. Does nothing if the name
    is already in use.

    :param connection: connection to Ignite server,
    :param cache_props: cache configuration properties to create cache with
     in form of dictionary {property code: python value}.
     You must supply at least name (PROP_NAME),
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status if cache was created,
     non-zero status and an error description otherwise.
    """
    # assign each property a positional field name and look up its codec
    prop_types = {}
    prop_values = {'param_count': len(cache_props)}
    for index, (prop_code, prop_value) in enumerate(cache_props.items()):
        field = 'property_{}'.format(index)
        prop_types[field] = prop_map(prop_code)
        prop_values[field] = prop_value

    query_struct = ConfigQuery(
        OP_CACHE_GET_OR_CREATE_WITH_CONFIGURATION,
        [('param_count', Short)] + list(prop_types.items()),
        query_id=query_id,
    )
    return query_struct.perform(connection, query_params=prop_values)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/key_value.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/key_value.py b/modules/platforms/python/pyignite/api/key_value.py
new file mode 100644
index 0000000..56f5378
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/key_value.py
@@ -0,0 +1,995 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Iterable, Union
+
+from pyignite.queries.op_codes import *
+from pyignite.datatypes import (
+ Map, Bool, Byte, Int, Long, AnyDataArray, AnyDataObject,
+)
+from pyignite.datatypes.key_value import PeekModes
+from pyignite.queries import Query, Response
+from pyignite.utils import cache_id
+
+
def cache_put(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores a key-value pair in the cache, replacing any value previously
    mapped to the same key.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param value: value to associate with the key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param value_hint: (optional) Ignite type the value should be
     converted to,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status if a value
     is written, non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_PUT,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
        'value': value,
    }
    return request.perform(connection, query_params=params)
+
+
def cache_get(
    connection: 'Connection', cache: Union[str, int], key,
    key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Looks up the value stored in the cache under the given key.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and a value
     retrieved on success, non-zero status and an error description on failure.
    """

    request = Query(
        OP_CACHE_GET,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', AnyDataObject)],
    )
    # On success, unwrap the single response field for the caller.
    if result.status == 0:
        result.value = result.value['value']
    return result
+
+
def cache_get_all(
    connection: 'Connection', cache: Union[str, int], keys: Iterable,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Retrieves multiple key-value pairs from cache.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param keys: list of keys or tuples of (key, key_hint),
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status and a dict, made of
     retrieved key-value pairs, non-zero status and an error description
     on failure.
    """

    query_struct = Query(
        OP_CACHE_GET_ALL,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('keys', AnyDataArray()),
        ],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={
            'hash_code': cache_id(cache),
            'flag': 1 if binary else 0,
            'keys': keys,
        },
        response_config=[
            ('data', Map),
        ],
    )
    if result.status == 0:
        # result.value is already a dict of response fields; index it
        # directly, as every sibling cache_* function does (the previous
        # dict(result.value) round-trip was a redundant copy).
        result.value = result.value['data']
    return result
+
+
def cache_put_all(
    connection: 'Connection', cache: Union[str, int], pairs: dict,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores several key-value pairs in the cache at once, replacing any
    existing associations.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param pairs: dictionary of key-value pairs to store. Each key or value
     may be a plain Python item or a tuple of (item, hint),
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status if key-value pairs
     are written, non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_PUT_ALL,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('data', Map),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'data': pairs,
    }
    return request.perform(connection, query_params=params)
+
+
def cache_contains_key(
    connection: 'Connection', cache: Union[str, int], key,
    key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Tells whether the cache holds an entry for the given key.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and a bool value
     retrieved on success: `True` when key is present, `False` otherwise,
     non-zero status and an error description on failure.
    """

    request = Query(
        OP_CACHE_CONTAINS_KEY,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', Bool)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_contains_keys(
    connection: 'Connection', cache: Union[str, int], keys: Iterable,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Tells whether the cache holds entries for every one of the given keys.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param keys: a list of keys or (key, type hint) tuples,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and a bool value
     retrieved on success: `True` when all keys are present, `False` otherwise,
     non-zero status and an error description on failure.
    """

    request = Query(
        OP_CACHE_CONTAINS_KEYS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('keys', AnyDataArray()),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'keys': keys,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', Bool)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_get_and_put(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores a value under the given key and hands back whatever value was
    mapped to that key before, or null value if the key was absent.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param value: value to associate with the key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param value_hint: (optional) Ignite type the value should be
     converted to,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and an old value
     or None if a value is written, non-zero status and an error description
     in case of error.
    """

    request = Query(
        OP_CACHE_GET_AND_PUT,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
        'value': value,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', AnyDataObject)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_get_and_replace(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores a value under the given key and hands back the previous value,
    but only when a value is already mapped to that key.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param value: value to associate with the key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param value_hint: (optional) Ignite type the value should be
     converted to,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and an old value
     or None on success, non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_GET_AND_REPLACE,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
        'value': value,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', AnyDataObject)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_get_and_remove(
    connection: 'Connection', cache: Union[str, int], key,
    key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Deletes the cache entry for the given key and hands back the value it
    held.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and an old value
     or None, non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_GET_AND_REMOVE,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', AnyDataObject)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_put_if_absent(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Puts a value with a given key to cache only if the key
    does not already exist.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param key: key for the cache entry. Can be of any supported type,
    :param value: value for the key,
    :param key_hint: (optional) Ignite data type, for which the given key
     should be converted,
    :param value_hint: (optional) Ignite data type, for which the given value
     should be converted.
    :param binary: (optional) pass True to keep the value in binary form. False
     by default,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object. Contains zero status and a boolean
     success code (True if the value was set, i.e. the key was absent),
     or non-zero status and an error description if something has gone wrong.
    """

    query_struct = Query(
        OP_CACHE_PUT_IF_ABSENT,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={
            'hash_code': cache_id(cache),
            'flag': 1 if binary else 0,
            'key': key,
            'value': value,
        },
        response_config=[
            ('success', Bool),
        ],
    )
    if result.status == 0:
        # Unwrap the boolean 'success' field for the caller.
        result.value = result.value['success']
    return result
+
+
def cache_get_and_put_if_absent(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores a value under the given key only when the key is absent, handing
    back whatever value (or None) was mapped to the key before.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param value: value to associate with the key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param value_hint: (optional) Ignite type the value should be
     converted to,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and an old value
     or None on success, non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_GET_AND_PUT_IF_ABSENT,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
        'value': value,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('value', AnyDataObject)],
    )
    if result.status != 0:
        return result
    result.value = result.value['value']
    return result
+
+
def cache_replace(
    connection: 'Connection', cache: Union[str, int], key, value,
    key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Stores a value under the given key only when the key is already present
    in the cache.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key; may be of any supported type,
    :param value: value to associate with the key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param value_hint: (optional) Ignite type the value should be
     converted to,
    :param binary: True to keep the value in binary form; defaults to False,
    :param query_id: client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and a boolean
     success code, or non-zero status and an error description if something
     has gone wrong.
    """

    request = Query(
        OP_CACHE_REPLACE,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
        'value': value,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('success', Bool)],
    )
    if result.status != 0:
        return result
    result.value = result.value['success']
    return result
+
+
def cache_replace_if_equals(
    connection: 'Connection', cache: Union[str, int], key, sample, value,
    key_hint=None, sample_hint=None, value_hint=None,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Puts a value with a given key to cache only if the key already exists
    and value equals provided sample.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param key: key for the cache entry,
    :param sample: a sample to compare the stored value with,
    :param value: new value for the given key,
    :param key_hint: (optional) Ignite data type, for which the given key
     should be converted,
    :param sample_hint: (optional) Ignite data type, for which
     the given sample should be converted,
    :param value_hint: (optional) Ignite data type, for which the given value
     should be converted,
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a boolean
     success code, or non-zero status and an error description if something
     has gone wrong.
    """

    query_struct = Query(
        OP_CACHE_REPLACE_IF_EQUALS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('sample', sample_hint or AnyDataObject),
            ('value', value_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={
            'hash_code': cache_id(cache),
            'flag': 1 if binary else 0,
            'key': key,
            'sample': sample,
            'value': value,
        },
        response_config=[
            ('success', Bool),
        ],
    )
    if result.status == 0:
        # Unwrap the boolean 'success' field for the caller.
        result.value = result.value['success']
    return result
+
+
def cache_clear(
    connection: 'Connection', cache: Union[str, int], binary=False,
    query_id=None,
) -> 'APIResult':
    """
    Empties the cache, bypassing listeners and cache writers.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status on success,
     non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_CLEAR,
        [
            ('hash_code', Int),
            ('flag', Byte),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
    }
    return request.perform(connection, query_params=params)
+
+
def cache_clear_key(
    connection: 'Connection', cache: Union[str, int], key,
    key_hint: object=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Clears a single cache key, bypassing listeners and cache writers.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param key: cache entry key,
    :param key_hint: (optional) Ignite type the key should be converted to,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status on success,
     non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_CLEAR_KEY,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'key': key,
    }
    return request.perform(connection, query_params=params)
+
+
def cache_clear_keys(
    connection: 'Connection', cache: Union[str, int], keys: list,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Clears several cache keys at once, bypassing listeners and cache
    writers.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param keys: list of keys or tuples of (key, key_hint),
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status on success,
     non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_CLEAR_KEYS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('keys', AnyDataArray()),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'keys': keys,
    }
    return request.perform(connection, query_params=params)
+
+
def cache_remove_key(
    connection: 'Connection', cache: Union[str, int], key,
    key_hint: object=None, binary=False, query_id=None,
) -> 'APIResult':
    """
    Removes the cache entry with the given key, notifying listeners
    and cache writers.

    (The previous summary, "Clears the cache key without notifying
    listeners", was copied from cache_clear_key and did not match this
    operation, which returns a removal success flag.)

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param key: key for the cache entry,
    :param key_hint: (optional) Ignite data type, for which the given key
     should be converted,
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a boolean
     success code, or non-zero status and an error description if something
     has gone wrong.
    """

    query_struct = Query(
        OP_CACHE_REMOVE_KEY,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={
            'hash_code': cache_id(cache),
            'flag': 1 if binary else 0,
            'key': key,
        },
        response_config=[
            ('success', Bool),
        ],
    )
    if result.status == 0:
        # Unwrap the boolean 'success' field for the caller.
        result.value = result.value['success']
    return result
+
+
def cache_remove_if_equals(
    connection: 'Connection', cache: Union[str, int], key, sample,
    key_hint=None, sample_hint=None,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Removes an entry with a given key if provided value is equal to
    actual value, notifying listeners and cache writers.

    :param connection: connection to Ignite server,
    :param cache: name or ID of the cache,
    :param key: key for the cache entry,
    :param sample: a sample to compare the stored value with,
    :param key_hint: (optional) Ignite data type, for which the given key
     should be converted,
    :param sample_hint: (optional) Ignite data type, for which
     the given sample should be converted,
    :param binary: (optional) pass True to keep the value in binary form.
     False by default,
    :param query_id: (optional) a value generated by client and returned
     as-is in response.query_id. When the parameter is omitted, a random
     value is generated,
    :return: API result data object. Contains zero status and a boolean
     success code, or non-zero status and an error description if something
     has gone wrong.
    """

    query_struct = Query(
        OP_CACHE_REMOVE_IF_EQUALS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('key', key_hint or AnyDataObject),
            ('sample', sample_hint or AnyDataObject),
        ],
        query_id=query_id,
    )
    result = query_struct.perform(
        connection,
        query_params={
            'hash_code': cache_id(cache),
            'flag': 1 if binary else 0,
            'key': key,
            'sample': sample,
        },
        response_config=[
            ('success', Bool),
        ],
    )
    if result.status == 0:
        # Unwrap the boolean 'success' field for the caller.
        result.value = result.value['success']
    return result
+
+
def cache_remove_keys(
    connection: 'Connection', cache: Union[str, int], keys: Iterable,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Removes the entries for all given keys, notifying listeners and cache
    writers.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param keys: list of keys or tuples of (key, key_hint),
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status on success,
     non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_REMOVE_KEYS,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('keys', AnyDataArray()),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'keys': keys,
    }
    return request.perform(connection, query_params=params)
+
+
def cache_remove_all(
    connection: 'Connection', cache: Union[str, int], binary=False,
    query_id=None,
) -> 'APIResult':
    """
    Removes every entry from the cache, notifying listeners and cache
    writers.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status on success,
     non-zero status and an error description otherwise.
    """

    request = Query(
        OP_CACHE_REMOVE_ALL,
        [
            ('hash_code', Int),
            ('flag', Byte),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
    }
    return request.perform(connection, query_params=params)
+
+
def cache_get_size(
    connection: 'Connection', cache: Union[str, int], peek_modes=0,
    binary=False, query_id=None,
) -> 'APIResult':
    """
    Counts the entries in a cache.

    :param connection: connection to the Ignite server,
    :param cache: cache name or cache ID,
    :param peek_modes: (optional) limit count to near cache partition
     (PeekModes.NEAR), primary cache (PeekModes.PRIMARY), or backup cache
     (PeekModes.BACKUP). Defaults to all cache partitions (PeekModes.ALL),
    :param binary: (optional) True to keep the value in binary form;
     defaults to False,
    :param query_id: (optional) client-generated ID returned verbatim in
     response.query_id; a random one is used when omitted,
    :return: API result data object. Contains zero status and a number of
     cache entries on success, non-zero status and an error description
     otherwise.
    """
    # Normalize a scalar peek mode into the list form the parser expects;
    # 0 stands for "all partitions" and maps to an empty list.
    if not isinstance(peek_modes, (list, tuple)):
        peek_modes = [] if peek_modes == 0 else [peek_modes]

    request = Query(
        OP_CACHE_GET_SIZE,
        [
            ('hash_code', Int),
            ('flag', Byte),
            ('peek_modes', PeekModes),
        ],
        query_id=query_id,
    )
    params = {
        'hash_code': cache_id(cache),
        'flag': int(binary),
        'peek_modes': peek_modes,
    }
    result = request.perform(
        connection,
        query_params=params,
        response_config=[('count', Long)],
    )
    if result.status != 0:
        return result
    result.value = result.value['count']
    return result
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/result.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/result.py b/modules/platforms/python/pyignite/api/result.py
new file mode 100644
index 0000000..864ef61
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/result.py
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.datatypes import String
+
+
class APIResult:
    """
    Dataclass which represents the result of API request.

    Fields are:

    * status: request status code. 0 if successful,
    * message: 'Success' if status == 0, verbatim error description
      otherwise,
    * value: return value or None.
    """

    # Class-level defaults; instances override `message` only on error
    # and `value` only when an API function attaches a payload.
    message = 'Success'
    value = None

    def __init__(self, response: 'Response'):
        self.status = response.status_code
        self.query_id = response.query_id
        # Only failed responses carry an `error_message` field; a sentinel
        # distinguishes "attribute absent" from any legitimate value.
        absent = object()
        error_message = getattr(response, 'error_message', absent)
        if error_message is not absent:
            self.message = String.to_python(error_message)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/sql.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/sql.py b/modules/platforms/python/pyignite/api/sql.py
new file mode 100644
index 0000000..f2f96fc
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/sql.py
@@ -0,0 +1,478 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Only key-value queries (scan queries) are implemented. SQL part is still
+in progress.
+"""
+
+from typing import Union
+
+from pyignite.datatypes import (
+ AnyDataArray, AnyDataObject, Bool, Byte, Int, Long, Map, Null, String,
+ StructArray,
+)
+from pyignite.datatypes.sql import StatementType
+from pyignite.queries import Query, Response, SQLResponse
+from pyignite.queries.op_codes import *
+from pyignite.utils import cache_id
+from .result import APIResult
+
+
+def scan(
+ connection: 'Connection', cache: Union[str, int], page_size: int,
+ partitions: int=-1, local: bool=False, binary: bool=False, query_id=None,
+) -> APIResult:
+ """
+ Performs scan query.
+
+ :param connection: connection to Ignite server,
+ :param cache: name or ID of the cache,
+ :param page_size: cursor page size,
+ :param partitions: (optional) number of partitions to query
+ (negative to query entire cache),
+ :param local: (optional) pass True if this query should be executed
+ on local node only. Defaults to False,
+ :param binary: (optional) pass True to keep the value in binary form.
+ False by default,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `cursor`: int, cursor ID,
+ * `data`: dict, result rows as key-value pairs,
+ * `more`: bool, True if more data is available for subsequent
+ ‘scan_cursor_get_page’ calls.
+ """
+
+ # Field order must follow the binary protocol layout of OP_QUERY_SCAN.
+ query_struct = Query(
+ OP_QUERY_SCAN,
+ [
+ ('hash_code', Int),
+ ('flag', Byte),
+ ('filter', Null),
+ ('page_size', Int),
+ ('partitions', Int),
+ ('local', Bool),
+ ],
+ query_id=query_id,
+ )
+ result = query_struct.perform(
+ connection,
+ query_params={
+ 'hash_code': cache_id(cache),
+ 'flag': 1 if binary else 0, # 1 = keep values in binary form
+ 'filter': None, # filter objects are not supported here
+ 'page_size': page_size,
+ 'partitions': partitions,
+ 'local': 1 if local else 0,
+ },
+ response_config=[
+ ('cursor', Long),
+ ('data', Map),
+ ('more', Bool),
+ ],
+ )
+ if result.status == 0:
+ # Convert the parsed list of key-value pairs into a plain dict.
+ result.value = dict(result.value)
+ return result
+
+
+def scan_cursor_get_page(
+ connection: 'Connection', cursor: int, query_id=None,
+) -> APIResult:
+ """
+ Fetches the next scan query cursor page by cursor ID that is obtained
+ from `scan` function.
+
+ :param connection: connection to Ignite server,
+ :param cursor: cursor ID,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `data`: dict, result rows as key-value pairs,
+ * `more`: bool, True if more data is available for subsequent
+ ‘scan_cursor_get_page’ calls.
+ """
+
+ # The request body for this operation is just the cursor ID.
+ query_struct = Query(
+ OP_QUERY_SCAN_CURSOR_GET_PAGE,
+ [
+ ('cursor', Long),
+ ],
+ query_id=query_id,
+ )
+ result = query_struct.perform(
+ connection,
+ query_params={
+ 'cursor': cursor,
+ },
+ response_config=[
+ ('data', Map),
+ ('more', Bool),
+ ],
+ )
+ if result.status == 0:
+ # Convert the parsed list of key-value pairs into a plain dict.
+ result.value = dict(result.value)
+ return result
+
+
+def sql(
+ connection: 'Connection', cache: Union[str, int],
+ table_name: str, query_str: str, page_size: int, query_args=None,
+ distributed_joins: bool=False, replicated_only: bool=False,
+ local: bool=False, timeout: int=0, binary: bool=False, query_id=None
+) -> APIResult:
+ """
+ Executes an SQL query over data stored in the cluster. The query returns
+ the whole record (key and value).
+
+ :param connection: connection to Ignite server,
+ :param cache: name or ID of the cache,
+ :param table_name: name of a type or SQL table,
+ :param query_str: SQL query string,
+ :param page_size: cursor page size,
+ :param query_args: (optional) query arguments,
+ :param distributed_joins: (optional) distributed joins. Defaults to False,
+ :param replicated_only: (optional) whether query contains only replicated
+ tables or not. Defaults to False,
+ :param local: (optional) pass True if this query should be executed
+ on local node only. Defaults to False,
+ :param timeout: (optional) non-negative timeout value in ms. Zero disables
+ timeout (default),
+ :param binary: (optional) pass True to keep the value in binary form.
+ False by default,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `cursor`: int, cursor ID,
+ * `data`: dict, result rows as key-value pairs,
+ * `more`: bool, True if more data is available for subsequent
+ ‘sql_get_page’ calls.
+ """
+
+ # Avoid a mutable default argument; an absent argument list is empty.
+ if query_args is None:
+ query_args = []
+
+ # Field order must follow the binary protocol layout of OP_QUERY_SQL.
+ query_struct = Query(
+ OP_QUERY_SQL,
+ [
+ ('hash_code', Int),
+ ('flag', Byte),
+ ('table_name', String),
+ ('query_str', String),
+ ('query_args', AnyDataArray()),
+ ('distributed_joins', Bool),
+ ('local', Bool),
+ ('replicated_only', Bool),
+ ('page_size', Int),
+ ('timeout', Long),
+ ],
+ query_id=query_id,
+ )
+ result = query_struct.perform(
+ connection,
+ query_params={
+ 'hash_code': cache_id(cache),
+ 'flag': 1 if binary else 0, # 1 = keep values in binary form
+ 'table_name': table_name,
+ 'query_str': query_str,
+ 'query_args': query_args,
+ 'distributed_joins': 1 if distributed_joins else 0,
+ 'local': 1 if local else 0,
+ 'replicated_only': 1 if replicated_only else 0,
+ 'page_size': page_size,
+ 'timeout': timeout,
+ },
+ response_config=[
+ ('cursor', Long),
+ ('data', Map),
+ ('more', Bool),
+ ],
+ )
+ if result.status == 0:
+ # Convert the parsed list of key-value pairs into a plain dict.
+ result.value = dict(result.value)
+ return result
+
+
+def sql_cursor_get_page(
+ connection: 'Connection', cursor: int, query_id=None,
+) -> APIResult:
+ """
+ Retrieves the next SQL query cursor page by cursor ID from `sql`.
+
+ :param connection: connection to Ignite server,
+ :param cursor: cursor ID,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `data`: dict, result rows as key-value pairs,
+ * `more`: bool, True if more data is available for subsequent
+ ‘sql_cursor_get_page’ calls.
+ """
+
+ # The request body for this operation is just the cursor ID.
+ query_struct = Query(
+ OP_QUERY_SQL_CURSOR_GET_PAGE,
+ [
+ ('cursor', Long),
+ ],
+ query_id=query_id,
+ )
+ result = query_struct.perform(
+ connection,
+ query_params={
+ 'cursor': cursor,
+ },
+ response_config=[
+ ('data', Map),
+ ('more', Bool),
+ ],
+ )
+ if result.status == 0:
+ # Convert the parsed list of key-value pairs into a plain dict.
+ result.value = dict(result.value)
+ return result
+
+
+def sql_fields(
+ connection: 'Connection', cache: Union[str, int],
+ query_str: str, page_size: int, query_args=None, schema: str=None,
+ statement_type: int=StatementType.ANY, distributed_joins: bool=False,
+ local: bool=False, replicated_only: bool=False,
+ enforce_join_order: bool=False, collocated: bool=False, lazy: bool=False,
+ include_field_names: bool=False, max_rows: int=-1, timeout: int=0,
+ binary: bool=False, query_id=None
+) -> APIResult:
+ """
+ Performs SQL fields query.
+
+ :param connection: connection to Ignite server,
+ :param cache: name or ID of the cache,
+ :param query_str: SQL query string,
+ :param page_size: cursor page size,
+ :param query_args: (optional) query arguments. List of values or
+ (value, type hint) tuples,
+ :param schema: (optional) schema for the query. Defaults to `PUBLIC`,
+ :param statement_type: (optional) statement type. Can be:
+
+ * StatementType.ANY − any type (default),
+ * StatementType.SELECT − select,
+ * StatementType.UPDATE − update.
+
+ :param distributed_joins: (optional) distributed joins. Defaults to False,
+ :param local: (optional) pass True if this query should be executed
+ on local node only. Defaults to False,
+ :param replicated_only: (optional) whether query contains only
+ replicated tables or not. Defaults to False,
+ :param enforce_join_order: (optional) enforce join order. Defaults
+ to False,
+ :param collocated: (optional) whether your data is co-located or not.
+ Defaults to False,
+ :param lazy: (optional) lazy query execution. Defaults to False,
+ :param include_field_names: (optional) include field names in result.
+ Defaults to False,
+ :param max_rows: (optional) query-wide maximum of rows. Defaults to -1
+ (all rows),
+ :param timeout: (optional) non-negative timeout value in ms. Zero disables
+ timeout (default),
+ :param binary: (optional) pass True to keep the value in binary form.
+ False by default,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `cursor`: int, cursor ID,
+ * `data`: list, result values,
+ * `more`: bool, True if more data is available for subsequent
+ ‘sql_fields_cursor_get_page’ calls.
+ """
+ # Avoid a mutable default argument; an absent argument list is empty.
+ if query_args is None:
+ query_args = []
+
+ # Field order must follow the binary protocol layout of
+ # OP_QUERY_SQL_FIELDS.
+ query_struct = Query(
+ OP_QUERY_SQL_FIELDS,
+ [
+ ('hash_code', Int),
+ ('flag', Byte),
+ ('schema', String),
+ ('page_size', Int),
+ ('max_rows', Int),
+ ('query_str', String),
+ ('query_args', AnyDataArray()),
+ ('statement_type', StatementType),
+ ('distributed_joins', Bool),
+ ('local', Bool),
+ ('replicated_only', Bool),
+ ('enforce_join_order', Bool),
+ ('collocated', Bool),
+ ('lazy', Bool),
+ ('timeout', Long),
+ ('include_field_names', Bool),
+ ],
+ query_id=query_id,
+ )
+
+ # Unlike `scan`/`sql`, this operation cannot use Query.perform(): the
+ # response needs the specialized SQLResponse parser below, so the
+ # request is serialized and sent by hand.
+ # NOTE(review): `schema` is sent as-is when left at None; presumably
+ # the server then falls back to PUBLIC — confirm against protocol docs.
+ _, send_buffer = query_struct.from_python({
+ 'hash_code': cache_id(cache),
+ 'flag': 1 if binary else 0, # 1 = keep values in binary form
+ 'schema': schema,
+ 'page_size': page_size,
+ 'max_rows': max_rows,
+ 'query_str': query_str,
+ 'query_args': query_args,
+ 'statement_type': statement_type,
+ 'distributed_joins': distributed_joins,
+ 'local': local,
+ 'replicated_only': replicated_only,
+ 'enforce_join_order': enforce_join_order,
+ 'collocated': collocated,
+ 'lazy': lazy,
+ 'timeout': timeout,
+ 'include_field_names': include_field_names,
+ })
+
+ connection.send(send_buffer)
+
+ response_struct = SQLResponse(
+ include_field_names=include_field_names,
+ has_cursor=True,
+ )
+ response_class, recv_buffer = response_struct.parse(connection)
+ response = response_class.from_buffer_copy(recv_buffer)
+
+ result = APIResult(response)
+ if result.status != 0:
+ return result
+ result.value = response_struct.to_python(response)
+ return result
+
+
+def sql_fields_cursor_get_page(
+ connection: 'Connection', cursor: int, field_count: int, query_id=None,
+) -> APIResult:
+ """
+ Retrieves the next query result page by cursor ID from `sql_fields`.
+
+ :param connection: connection to Ignite server,
+ :param cursor: cursor ID,
+ :param field_count: a number of fields in a row,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status and a value
+ of type dict with results on success, non-zero status and an error
+ description otherwise.
+
+ Value dict is of following format:
+
+ * `data`: list, result values,
+ * `more`: bool, True if more data is available for subsequent
+ ‘sql_fields_cursor_get_page’ calls.
+ """
+
+ query_struct = Query(
+ OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE,
+ [
+ ('cursor', Long),
+ ],
+ query_id=query_id,
+ )
+
+ _, send_buffer = query_struct.from_python({
+ 'cursor': cursor,
+ })
+
+ connection.send(send_buffer)
+
+ response_struct = Response([
+ ('data', StructArray([
+ ('field_{}'.format(i), AnyDataObject) for i in range(field_count)
+ ])),
+ ('more', Bool),
+ ])
+ response_class, recv_buffer = response_struct.parse(connection)
+ response = response_class.from_buffer_copy(recv_buffer)
+
+ result = APIResult(response)
+ if result.status != 0:
+ return result
+ value = response_struct.to_python(response)
+ result.value = {
+ 'data': [],
+ 'more': value['more']
+ }
+ for row_dict in value['data']:
+ row = []
+ for field_key in sorted(row_dict.keys()):
+ row.append(row_dict[field_key])
+ result.value['data'].append(row)
+ return result
+
+
+def resource_close(
+ connection: 'Connection', cursor: int, query_id=None
+) -> APIResult:
+ """
+ Closes a resource, such as query cursor.
+
+ :param connection: connection to Ignite server,
+ :param cursor: cursor ID,
+ :param query_id: (optional) a value generated by client and returned as-is
+ in response.query_id. When the parameter is omitted, a random value
+ is generated,
+ :return: API result data object. Contains zero status on success,
+ non-zero status and an error description otherwise.
+ """
+
+ # The request body for this operation is just the resource (cursor) ID.
+ query_struct = Query(
+ OP_RESOURCE_CLOSE,
+ [
+ ('cursor', Long),
+ ],
+ query_id=query_id,
+ )
+ # No response payload beyond the standard header, so no response_config.
+ return query_struct.perform(
+ connection,
+ query_params={
+ 'cursor': cursor,
+ },
+ )
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/binary.py b/modules/platforms/python/pyignite/binary.py
new file mode 100644
index 0000000..e726730
--- /dev/null
+++ b/modules/platforms/python/pyignite/binary.py
@@ -0,0 +1,136 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+:class:`~pyignite.binary.GenericObjectMeta` is a metaclass used to create
+classes, which objects serve as a native Python values for Ignite Complex
+object data type. You can use this metaclass with your existing classes
+to save and restore their selected attributes and properties to/from
+Ignite caches. It is also used internally by `pyignite` to create simple
+data classes “on the fly” when retrieving arbitrary Complex objects.
+
+You can get the examples of using Complex objects in the
+:ref:`complex_object_usage` section of `pyignite` documentation.
+"""
+
+from collections import OrderedDict
+from typing import Any
+
+import attr
+
+from .datatypes import *
+from .exceptions import ParseError
+from .utils import entity_id, schema_id
+
+
+ALLOWED_FIELD_TYPES = [
+ Null, ByteObject, ShortObject, IntObject, LongObject, FloatObject,
+ DoubleObject, CharObject, BoolObject, UUIDObject, DateObject,
+ TimestampObject, TimeObject, EnumObject, BinaryEnumObject,
+ ByteArrayObject, ShortArrayObject, IntArrayObject, LongArrayObject,
+ FloatArrayObject, DoubleArrayObject, CharArrayObject, BoolArrayObject,
+ UUIDArrayObject, DateArrayObject, TimestampArrayObject,
+ TimeArrayObject, EnumArrayObject, String, StringArrayObject,
+ DecimalObject, DecimalArrayObject, ObjectArrayObject, CollectionObject,
+ MapObject, BinaryObject, WrappedDataObject,
+]
+
+
+class GenericObjectPropsMixin:
+ """
+ This class is mixed both to metaclass and to resulting class to make class
+ properties universally available. You should not subclass it directly.
+ """
+ @property
+ def type_name(self) -> str:
+ """ Binary object type name. """
+ # `_type_name` is set by GenericObjectMeta.__init__().
+ return self._type_name
+
+ @property
+ def type_id(self) -> int:
+ """ Binary object type ID. """
+ # Derived from the type name on every access.
+ return entity_id(self._type_name)
+
+ @property
+ def schema(self) -> OrderedDict:
+ """ Binary object schema. """
+ # Return a copy so callers cannot mutate the class-level schema.
+ return self._schema.copy()
+
+ @property
+ def schema_id(self) -> int:
+ """ Binary object schema ID. """
+ return schema_id(self._schema)
+
+ def __new__(cls, *args, **kwargs) -> Any:
+ # allow all items in Binary Object schema to be populated as optional
+ # arguments to `__init__()` with sensible defaults.
+ if cls is not GenericObjectMeta:
+ attributes = {
+ k: attr.ib(
+ type=getattr(v, 'pythonic', type(None)),
+ default=getattr(v, 'default', None),
+ ) for k, v in cls.schema.items()
+ }
+ attributes.update({'version': attr.ib(type=int, default=1)})
+ # `attr.s` rewrites the class in place, generating `__init__()`
+ # and friends from the schema-derived attribute definitions.
+ cls = attr.s(cls, these=attributes)
+ # skip parameters
+ return super().__new__(cls)
+
+
+class GenericObjectMeta(type, GenericObjectPropsMixin):
+ """
+ Complex (or Binary) Object metaclass. It is aimed to help user create
+ classes, which objects could serve as a pythonic representation of the
+ :class:`~pyignite.datatypes.complex.BinaryObject` Ignite data type.
+ """
+ # Filled in by __init__() below for each created class.
+ _schema = None
+ _type_name = None
+ version = None
+
+ def __new__(
+ mcs: Any, name: str, base_classes: tuple, namespace: dict, **kwargs
+ ) -> Any:
+ """ Sort out class creation arguments. """
+ # Prepend GenericObjectPropsMixin so created classes expose the
+ # type_name/type_id/schema/schema_id properties on instances too.
+ return super().__new__(
+ mcs, name, (GenericObjectPropsMixin, )+base_classes, namespace
+ )
+
+ @staticmethod
+ def _validate_schema(schema: dict):
+ # Reject any field type that is not a known Ignite binary type.
+ for field_type in schema.values():
+ if field_type not in ALLOWED_FIELD_TYPES:
+ raise ParseError(
+ 'Wrong binary field type: {}'.format(field_type)
+ )
+
+ def __init__(
+ cls, name: str, base_classes: tuple, namespace: dict,
+ type_name: str=None, schema: OrderedDict=None, **kwargs
+ ):
+ """
+ Initializes binary object class.
+
+ :param type_name: (optional) binary object name. Defaults to class
+ name,
+ :param schema: (optional) a dict of field names: field types,
+ :raise: ParseError if one or more binary field types
+ is not recognized.
+ """
+ cls._type_name = type_name or cls.__name__
+ cls._type_id = entity_id(cls._type_name)
+ # Validate before publishing the schema on the class.
+ schema = schema or OrderedDict()
+ cls._validate_schema(schema)
+ cls._schema = schema
+ super().__init__(name, base_classes, namespace)
[5/6] ignite git commit: IGNITE-7782 Python thin client
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/readme.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/readme.rst b/modules/platforms/python/docs/readme.rst
new file mode 100644
index 0000000..f91274e
--- /dev/null
+++ b/modules/platforms/python/docs/readme.rst
@@ -0,0 +1,202 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+=================
+Basic Information
+=================
+
+What it is
+----------
+
+This is an Apache Ignite thin (binary protocol) client library,
+written in Python 3, abbreviated as *pyignite*.
+
+`Apache Ignite`_ is a memory-centric distributed database, caching,
+and processing platform for transactional, analytical, and streaming
+workloads delivering in-memory speeds at petabyte scale.
+
+Ignite `binary client protocol`_ provides user applications the ability
+to communicate with an existing Ignite cluster without starting
+a full-fledged Ignite node. An application can connect to the cluster
+through a raw TCP socket.
+
+Prerequisites
+-------------
+
+- *Python 3.4* or above (3.6 is tested),
+- Access to *Apache Ignite* node, local or remote. The current thin client
+ version was tested on *Apache Ignite 2.7.0* (binary client protocol 1.2.0).
+
+Installation
+------------
+
+for end user
+""""""""""""
+
+If you want to use *pyignite* in your project, you may install it from PyPI:
+
+::
+
+$ pip install pyignite
+
+for developer
+"""""""""""""
+
+If you want to run tests, examples or build documentation, clone
+the whole repository:
+
+::
+
+$ git clone git@github.com:apache/ignite.git
+$ cd ignite/modules/platforms/python
+$ pip install -e .
+
+This will install the repository version of `pyignite` into your environment
+in so-called “develop” or “editable” mode. You may read more about
+`editable installs`_ in the `pip` manual.
+
+Then run through the contents of `requirements` folder to install
+the additional requirements into your working Python environment using
+
+::
+
+$ pip install -r requirements/<your task>.txt
+
+You may also want to consult the `setuptools`_ manual about using `setup.py`.
+
+Examples
+--------
+
+Some examples of using pyignite are provided in
+`ignite/modules/platforms/python/examples` folder. They are extensively
+commented in the :ref:`examples_of_usage` section of the documentation.
+
+This code implies that it is run in the environment with `pyignite` package
+installed, and Apache Ignite node is running on localhost:10800, unless
+otherwise noted.
+
+There is also a possibility to run the examples along with the tests. For
+the explanation of testing, look up the `Testing`_ section.
+
+Testing
+-------
+
+Create and activate virtualenv_ environment. Run
+
+::
+
+$ cd ignite/modules/platforms/python
+$ python ./setup.py pytest
+
+This does not require `pytest` and other test dependencies to be installed
+in your environment.
+
+Some or all tests require Apache Ignite node running on localhost:10800.
+To override the default parameters, use command line options
+``--ignite-host`` and ``--ignite-port``:
+
+::
+
+$ python ./setup.py pytest --addopts "--ignite-host=example.com --ignite-port=19840"
+
+You can use each of these two options multiple times. All combinations
+of given host and port will be tested.
+
+You can also test client against a server with SSL-encrypted connection.
+SSL-related `pytest` parameters are:
+
+``--use-ssl`` − use SSL encryption,
+
+``--ssl-certfile`` − a path to ssl certificate file to identify local party,
+
+``--ssl-ca-certfile`` − a path to a trusted certificate or a certificate chain,
+
+``--ssl-cert-reqs`` − determines how the remote side certificate is treated:
+
+- ``NONE`` (ignore, default),
+- ``OPTIONAL`` (validate, if provided),
+- ``REQUIRED`` (valid remote certificate is required),
+
+``--ssl-ciphers`` − ciphers to use,
+
+``--ssl-version`` − SSL version:
+
+- ``TLSV1_1`` (default),
+- ``TLSV1_2``.
+
+Other `pytest` parameters:
+
+``--timeout`` − timeout (in seconds) for each socket operation, including
+`connect`. Accepts integer or float value. Default is None (blocking mode),
+
+``--username`` and ``--password`` − credentials to authenticate to Ignite
+cluster. Used in conjunction with `authenticationEnabled` property in cluster
+configuration.
+
+``--examples`` − run the examples as one test. If you wish to run *only*
+the examples, supply also the name of the test function to `pytest` launcher:
+
+::
+
+$ pytest --examples ../tests/test_examples.py::test_examples
+
+In this test, the assertion fails if any of the examples' processes ends with
+non-zero exit code.
+
+Examples are not parameterized for the sake of simplicity. They always run
+with default parameters (host and port) regardless of any other
+`pytest` option.
+
+Since failover, SSL and authentication examples are meant to be controlled
+by user or depend on special configuration of the Ignite cluster, they
+can not be automated.
+
+Documentation
+-------------
+To recompile this documentation, do this from your virtualenv_ environment:
+
+::
+
+$ cd ignite/modules/platforms/python
+$ pip install -r requirements/docs.txt
+$ cd docs
+$ make html
+
+Then open `ignite/modules/platforms/python/docs/generated/html/index.html`_
+in your browser.
+
+If you feel that old version is stuck, do
+
+::
+
+$ cd ignite/modules/platforms/python/docs
+$ make clean
+$ sphinx-apidoc -feM -o source/ ../ ../setup.py
+$ make html
+
+And that should be it.
+
+Licensing
+---------
+
+This is a free software, brought to you on terms of the `Apache License v2`_.
+
+.. _Apache Ignite: https://apacheignite.readme.io/docs/what-is-ignite
+.. _binary client protocol: https://apacheignite.readme.io/docs/binary-client-protocol
+.. _Apache License v2: http://www.apache.org/licenses/LICENSE-2.0
+.. _virtualenv: https://virtualenv.pypa.io/
+.. _setuptools: https://setuptools.readthedocs.io/
+.. _ignite/modules/platforms/python/docs/generated/html/index.html: .
+.. _editable installs: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/modules.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/modules.rst b/modules/platforms/python/docs/source/modules.rst
new file mode 100644
index 0000000..c125dd3
--- /dev/null
+++ b/modules/platforms/python/docs/source/modules.rst
@@ -0,0 +1,7 @@
+python
+======
+
+.. toctree::
+ :maxdepth: 4
+
+ pyignite
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.binary.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.binary.rst b/modules/platforms/python/docs/source/pyignite.api.binary.rst
new file mode 100644
index 0000000..49f1c86
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.binary.rst
@@ -0,0 +1,7 @@
+pyignite.api.binary module
+==========================
+
+.. automodule:: pyignite.api.binary
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.cache_config.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.cache_config.rst b/modules/platforms/python/docs/source/pyignite.api.cache_config.rst
new file mode 100644
index 0000000..599c857
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.cache_config.rst
@@ -0,0 +1,7 @@
+pyignite.api.cache\_config module
+=================================
+
+.. automodule:: pyignite.api.cache_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.key_value.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.key_value.rst b/modules/platforms/python/docs/source/pyignite.api.key_value.rst
new file mode 100644
index 0000000..52d6c3f
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.key_value.rst
@@ -0,0 +1,7 @@
+pyignite.api.key\_value module
+==============================
+
+.. automodule:: pyignite.api.key_value
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.result.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.result.rst b/modules/platforms/python/docs/source/pyignite.api.result.rst
new file mode 100644
index 0000000..21398e3
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.result.rst
@@ -0,0 +1,7 @@
+pyignite.api.result module
+==========================
+
+.. automodule:: pyignite.api.result
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.rst b/modules/platforms/python/docs/source/pyignite.api.rst
new file mode 100644
index 0000000..e18d4a3
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.rst
@@ -0,0 +1,19 @@
+pyignite.api package
+====================
+
+.. automodule:: pyignite.api
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+
+ pyignite.api.binary
+ pyignite.api.cache_config
+ pyignite.api.key_value
+ pyignite.api.result
+ pyignite.api.sql
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.api.sql.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.api.sql.rst b/modules/platforms/python/docs/source/pyignite.api.sql.rst
new file mode 100644
index 0000000..84479ad
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.api.sql.rst
@@ -0,0 +1,7 @@
+pyignite.api.sql module
+=======================
+
+.. automodule:: pyignite.api.sql
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.binary.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.binary.rst b/modules/platforms/python/docs/source/pyignite.binary.rst
new file mode 100644
index 0000000..6b21582
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.binary.rst
@@ -0,0 +1,7 @@
+pyignite.binary module
+======================
+
+.. automodule:: pyignite.binary
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.cache.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.cache.rst b/modules/platforms/python/docs/source/pyignite.cache.rst
new file mode 100644
index 0000000..e6e83c5
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.cache.rst
@@ -0,0 +1,7 @@
+pyignite.cache module
+=====================
+
+.. automodule:: pyignite.cache
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.client.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.client.rst b/modules/platforms/python/docs/source/pyignite.client.rst
new file mode 100644
index 0000000..fef316b
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.client.rst
@@ -0,0 +1,7 @@
+pyignite.client module
+======================
+
+.. automodule:: pyignite.client
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.connection.generators.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.connection.generators.rst b/modules/platforms/python/docs/source/pyignite.connection.generators.rst
new file mode 100644
index 0000000..daecda3
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.connection.generators.rst
@@ -0,0 +1,7 @@
+pyignite.connection.generators module
+=====================================
+
+.. automodule:: pyignite.connection.generators
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.connection.handshake.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.connection.handshake.rst b/modules/platforms/python/docs/source/pyignite.connection.handshake.rst
new file mode 100644
index 0000000..28e83df
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.connection.handshake.rst
@@ -0,0 +1,7 @@
+pyignite.connection.handshake module
+====================================
+
+.. automodule:: pyignite.connection.handshake
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.connection.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.connection.rst b/modules/platforms/python/docs/source/pyignite.connection.rst
new file mode 100644
index 0000000..92b07a7
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.connection.rst
@@ -0,0 +1,17 @@
+pyignite.connection package
+===========================
+
+.. automodule:: pyignite.connection
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+
+ pyignite.connection.generators
+ pyignite.connection.handshake
+ pyignite.connection.ssl
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.connection.ssl.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.connection.ssl.rst b/modules/platforms/python/docs/source/pyignite.connection.ssl.rst
new file mode 100644
index 0000000..8eebf43
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.connection.ssl.rst
@@ -0,0 +1,7 @@
+pyignite.connection.ssl module
+==============================
+
+.. automodule:: pyignite.connection.ssl
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.constants.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.constants.rst b/modules/platforms/python/docs/source/pyignite.constants.rst
new file mode 100644
index 0000000..f71e4f1
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.constants.rst
@@ -0,0 +1,7 @@
+pyignite.constants module
+=========================
+
+.. automodule:: pyignite.constants
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.binary.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.binary.rst b/modules/platforms/python/docs/source/pyignite.datatypes.binary.rst
new file mode 100644
index 0000000..0d175de
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.binary.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.binary module
+================================
+
+.. automodule:: pyignite.datatypes.binary
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.cache_config.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.cache_config.rst b/modules/platforms/python/docs/source/pyignite.datatypes.cache_config.rst
new file mode 100644
index 0000000..3d5eaeb
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.cache_config.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.cache\_config module
+=======================================
+
+.. automodule:: pyignite.datatypes.cache_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.cache_properties.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.cache_properties.rst b/modules/platforms/python/docs/source/pyignite.datatypes.cache_properties.rst
new file mode 100644
index 0000000..57f0e9f
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.cache_properties.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.cache\_properties module
+===========================================
+
+.. automodule:: pyignite.datatypes.cache_properties
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.complex.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.complex.rst b/modules/platforms/python/docs/source/pyignite.datatypes.complex.rst
new file mode 100644
index 0000000..1e3f21e
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.complex.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.complex module
+=================================
+
+.. automodule:: pyignite.datatypes.complex
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.internal.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.internal.rst b/modules/platforms/python/docs/source/pyignite.datatypes.internal.rst
new file mode 100644
index 0000000..5dc5535
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.internal.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.internal module
+==================================
+
+.. automodule:: pyignite.datatypes.internal
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.key_value.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.key_value.rst b/modules/platforms/python/docs/source/pyignite.datatypes.key_value.rst
new file mode 100644
index 0000000..0b3aa88
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.key_value.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.key\_value module
+====================================
+
+.. automodule:: pyignite.datatypes.key_value
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.null_object.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.null_object.rst b/modules/platforms/python/docs/source/pyignite.datatypes.null_object.rst
new file mode 100644
index 0000000..05f22b1
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.null_object.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.null\_object module
+======================================
+
+.. automodule:: pyignite.datatypes.null_object
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.primitive.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.primitive.rst b/modules/platforms/python/docs/source/pyignite.datatypes.primitive.rst
new file mode 100644
index 0000000..8a53604
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.primitive.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.primitive module
+===================================
+
+.. automodule:: pyignite.datatypes.primitive
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.primitive_arrays.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.primitive_arrays.rst b/modules/platforms/python/docs/source/pyignite.datatypes.primitive_arrays.rst
new file mode 100644
index 0000000..b4b94bf
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.primitive_arrays.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.primitive\_arrays module
+===========================================
+
+.. automodule:: pyignite.datatypes.primitive_arrays
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.primitive_objects.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.primitive_objects.rst b/modules/platforms/python/docs/source/pyignite.datatypes.primitive_objects.rst
new file mode 100644
index 0000000..a74db38
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.primitive_objects.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.primitive\_objects module
+============================================
+
+.. automodule:: pyignite.datatypes.primitive_objects
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.prop_codes.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.prop_codes.rst b/modules/platforms/python/docs/source/pyignite.datatypes.prop_codes.rst
new file mode 100644
index 0000000..d23596b
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.prop_codes.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.prop\_codes module
+=====================================
+
+.. automodule:: pyignite.datatypes.prop_codes
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.rst b/modules/platforms/python/docs/source/pyignite.datatypes.rst
new file mode 100644
index 0000000..77e7183
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.rst
@@ -0,0 +1,28 @@
+pyignite.datatypes package
+==========================
+
+.. automodule:: pyignite.datatypes
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+
+ pyignite.datatypes.binary
+ pyignite.datatypes.cache_config
+ pyignite.datatypes.cache_properties
+ pyignite.datatypes.complex
+ pyignite.datatypes.internal
+ pyignite.datatypes.key_value
+ pyignite.datatypes.null_object
+ pyignite.datatypes.primitive
+ pyignite.datatypes.primitive_arrays
+ pyignite.datatypes.primitive_objects
+ pyignite.datatypes.prop_codes
+ pyignite.datatypes.sql
+ pyignite.datatypes.standard
+ pyignite.datatypes.type_codes
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.sql.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.sql.rst b/modules/platforms/python/docs/source/pyignite.datatypes.sql.rst
new file mode 100644
index 0000000..e20f084
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.sql.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.sql module
+=============================
+
+.. automodule:: pyignite.datatypes.sql
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.standard.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.standard.rst b/modules/platforms/python/docs/source/pyignite.datatypes.standard.rst
new file mode 100644
index 0000000..e46d339
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.standard.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.standard module
+==================================
+
+.. automodule:: pyignite.datatypes.standard
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.datatypes.type_codes.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.datatypes.type_codes.rst b/modules/platforms/python/docs/source/pyignite.datatypes.type_codes.rst
new file mode 100644
index 0000000..47baa4b
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.datatypes.type_codes.rst
@@ -0,0 +1,7 @@
+pyignite.datatypes.type\_codes module
+=====================================
+
+.. automodule:: pyignite.datatypes.type_codes
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.exceptions.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.exceptions.rst b/modules/platforms/python/docs/source/pyignite.exceptions.rst
new file mode 100644
index 0000000..dd24687
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.exceptions.rst
@@ -0,0 +1,7 @@
+pyignite.exceptions module
+==========================
+
+.. automodule:: pyignite.exceptions
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.queries.op_codes.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.queries.op_codes.rst b/modules/platforms/python/docs/source/pyignite.queries.op_codes.rst
new file mode 100644
index 0000000..bc556ec
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.queries.op_codes.rst
@@ -0,0 +1,7 @@
+pyignite.queries.op\_codes module
+=================================
+
+.. automodule:: pyignite.queries.op_codes
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.queries.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.queries.rst b/modules/platforms/python/docs/source/pyignite.queries.rst
new file mode 100644
index 0000000..6dd81a2
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.queries.rst
@@ -0,0 +1,15 @@
+pyignite.queries package
+========================
+
+.. automodule:: pyignite.queries
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+
+ pyignite.queries.op_codes
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.rst b/modules/platforms/python/docs/source/pyignite.rst
new file mode 100644
index 0000000..947cab2
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.rst
@@ -0,0 +1,30 @@
+pyignite package
+================
+
+.. automodule:: pyignite
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ pyignite.api
+ pyignite.connection
+ pyignite.datatypes
+ pyignite.queries
+
+Submodules
+----------
+
+.. toctree::
+
+ pyignite.binary
+ pyignite.cache
+ pyignite.client
+ pyignite.constants
+ pyignite.exceptions
+ pyignite.utils
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/source/pyignite.utils.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/source/pyignite.utils.rst b/modules/platforms/python/docs/source/pyignite.utils.rst
new file mode 100644
index 0000000..5ee42ab
--- /dev/null
+++ b/modules/platforms/python/docs/source/pyignite.utils.rst
@@ -0,0 +1,7 @@
+pyignite.utils module
+=====================
+
+.. automodule:: pyignite.utils
+ :members:
+ :undoc-members:
+ :show-inheritance:
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/binary_basics.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/binary_basics.py b/modules/platforms/python/examples/binary_basics.py
new file mode 100644
index 0000000..96a9058
--- /dev/null
+++ b/modules/platforms/python/examples/binary_basics.py
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+
+from pyignite import Client, GenericObjectMeta
+from pyignite.datatypes import *
+
+
+class Person(metaclass=GenericObjectMeta, schema=OrderedDict([
+ ('first_name', String),
+ ('last_name', String),
+ ('age', IntObject),
+])):
+ pass
+
+
+client = Client()
+client.connect('localhost', 10800)
+
+person_cache = client.get_or_create_cache('person')
+
+person_cache.put(
+ 1, Person(first_name='Ivan', last_name='Ivanov', age=33)
+)
+
+person = person_cache.get(1)
+print(person.__class__.__name__)
+# Person
+
+print(person.__class__ is Person)
+# True if `Person` was registered automatically (on writing)
+# or manually (using `client.register_binary_type()` method).
+# False otherwise
+
+print(person)
+# Person(first_name='Ivan', last_name='Ivanov', age=33, version=1)
+
+client.register_binary_type(Person)
+
+Person = person.__class__
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/create_binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/create_binary.py b/modules/platforms/python/examples/create_binary.py
new file mode 100644
index 0000000..c963796
--- /dev/null
+++ b/modules/platforms/python/examples/create_binary.py
@@ -0,0 +1,103 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+
+from pyignite import Client, GenericObjectMeta
+from pyignite.datatypes import DoubleObject, IntObject, String
+from pyignite.datatypes.prop_codes import *
+
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+student_cache = client.create_cache({
+ PROP_NAME: 'SQL_PUBLIC_STUDENT',
+ PROP_SQL_SCHEMA: 'PUBLIC',
+ PROP_QUERY_ENTITIES: [
+ {
+ 'table_name': 'Student'.upper(),
+ 'key_field_name': 'SID',
+ 'key_type_name': 'java.lang.Integer',
+ 'field_name_aliases': [],
+ 'query_fields': [
+ {
+ 'name': 'SID',
+ 'type_name': 'java.lang.Integer',
+ 'is_key_field': True,
+ 'is_notnull_constraint_field': True,
+ },
+ {
+ 'name': 'NAME',
+ 'type_name': 'java.lang.String',
+ },
+ {
+ 'name': 'LOGIN',
+ 'type_name': 'java.lang.String',
+ },
+ {
+ 'name': 'AGE',
+ 'type_name': 'java.lang.Integer',
+ },
+ {
+ 'name': 'GPA',
+ 'type_name': 'java.lang.Double',
+ },
+ ],
+ 'query_indexes': [],
+ 'value_type_name': 'SQL_PUBLIC_STUDENT_TYPE',
+ 'value_field_name': None,
+ },
+ ],
+ })
+
+
+class Student(
+ metaclass=GenericObjectMeta,
+ type_name='SQL_PUBLIC_STUDENT_TYPE',
+ schema=OrderedDict([
+ ('NAME', String),
+ ('LOGIN', String),
+ ('AGE', IntObject),
+ ('GPA', DoubleObject),
+ ])
+):
+ pass
+
+
+student_cache.put(
+ 1,
+ Student(LOGIN='jdoe', NAME='John Doe', AGE=17, GPA=4.25),
+ key_hint=IntObject
+)
+
+result = client.sql(
+ r'SELECT * FROM Student',
+ include_field_names=True
+)
+print(next(result))
+# ['SID', 'NAME', 'LOGIN', 'AGE', 'GPA']
+
+print(*result)
+# [1, 'John Doe', 'jdoe', 17, 4.25]
+
+# DROP_QUERY = 'DROP TABLE Student'
+# client.sql(DROP_QUERY)
+#
+# pyignite.exceptions.SQLError: class org.apache.ignite.IgniteCheckedException:
+# Only cache created with CREATE TABLE may be removed with DROP TABLE
+# [cacheName=SQL_PUBLIC_STUDENT]
+
+student_cache.destroy()
+client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/failover.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/failover.py b/modules/platforms/python/examples/failover.py
new file mode 100644
index 0000000..3a5fcce
--- /dev/null
+++ b/modules/platforms/python/examples/failover.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite import Client
+from pyignite.datatypes.cache_config import CacheMode
+from pyignite.datatypes.prop_codes import *
+from pyignite.exceptions import SocketError
+
+
+nodes = [
+ ('127.0.0.1', 10800),
+ ('127.0.0.1', 10801),
+ ('127.0.0.1', 10802),
+]
+
+client = Client(timeout=4.0)
+client.connect(nodes)
+print('Connected to {}'.format(client))
+
+my_cache = client.get_or_create_cache({
+ PROP_NAME: 'my_cache',
+ PROP_CACHE_MODE: CacheMode.REPLICATED,
+})
+my_cache.put('test_key', 0)
+
+# abstract main loop
+while True:
+ try:
+ # do the work
+ test_value = my_cache.get('test_key')
+ my_cache.put('test_key', test_value + 1)
+ except (OSError, SocketError) as e:
+ # recover from error (repeat last command, check data
+ # consistency or just continue — depends on the task)
+ print('Error: {}'.format(e))
+ print('Last value: {}'.format(my_cache.get('test_key')))
+ print('Reconnected to {}'.format(client))
+
+# Connected to 127.0.0.1:10800
+# Error: [Errno 104] Connection reset by peer
+# Last value: 6999
+# Reconnected to 127.0.0.1:10801
+# Error: Socket connection broken.
+# Last value: 12302
+# Reconnected to 127.0.0.1:10802
+# Error: [Errno 111] Connection refused
+# Traceback (most recent call last):
+# ...
+# pyignite.exceptions.ReconnectError: Can not reconnect: out of nodes
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/get_and_put.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/get_and_put.py b/modules/platforms/python/examples/get_and_put.py
new file mode 100644
index 0000000..49c5108
--- /dev/null
+++ b/modules/platforms/python/examples/get_and_put.py
@@ -0,0 +1,41 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite import Client
+
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+my_cache = client.create_cache('my cache')
+
+my_cache.put('my key', 42)
+
+result = my_cache.get('my key')
+print(result) # 42
+
+result = my_cache.get('non-existent key')
+print(result) # None
+
+result = my_cache.get_all([
+ 'my key',
+ 'non-existent key',
+ 'other-key',
+])
+print(result) # {'my key': 42}
+
+my_cache.clear_key('my key')
+
+my_cache.destroy()
+client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/migrate_binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/migrate_binary.py b/modules/platforms/python/examples/migrate_binary.py
new file mode 100644
index 0000000..f0b0f74
--- /dev/null
+++ b/modules/platforms/python/examples/migrate_binary.py
@@ -0,0 +1,190 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+from datetime import date
+from decimal import Decimal
+
+from pyignite import Client, GenericObjectMeta
+from pyignite.datatypes import (
+ BoolObject, DateObject, DecimalObject, LongObject, String,
+)
+
+
+# prepare old data
+old_schema = OrderedDict([
+ ('date', DateObject),
+ ('reported', BoolObject),
+ ('purpose', String),
+ ('sum', DecimalObject),
+ ('recipient', String),
+ ('cashier_id', LongObject),
+])
+
+old_data = [
+ (1, {
+ 'date': date(2017, 9, 21),
+ 'reported': True,
+ 'purpose': 'Praesent eget fermentum massa',
+ 'sum': Decimal('666.67'),
+ 'recipient': 'John Doe',
+ 'cashier_id': 8,
+ }),
+ (2, {
+ 'date': date(2017, 10, 11),
+ 'reported': True,
+ 'purpose': 'Proin in bibendum nulla',
+ 'sum': Decimal('333.33'),
+ 'recipient': 'Jane Roe',
+ 'cashier_id': 9,
+ }),
+ (3, {
+ 'date': date(2017, 10, 11),
+ 'reported': True,
+ 'purpose': 'Suspendisse nec dolor auctor, scelerisque ex eu, iaculis odio',
+ 'sum': Decimal('400.0'),
+ 'recipient': 'Jane Roe',
+ 'cashier_id': 8,
+ }),
+ (4, {
+ 'date': date(2017, 10, 24),
+ 'reported': False,
+ 'purpose': 'Quisque ut leo ligula',
+ 'sum': Decimal('1234.5'),
+ 'recipient': 'Joe Bloggs',
+ 'cashier_id': 10,
+ }),
+ (5, {
+ 'date': date(2017, 12, 1),
+ 'reported': True,
+ 'purpose': 'Quisque ut leo ligula',
+ 'sum': Decimal('800.0'),
+ 'recipient': 'Richard Public',
+ 'cashier_id': 12,
+ }),
+ (6, {
+ 'date': date(2017, 12, 1),
+ 'reported': True,
+ 'purpose': 'Aenean eget bibendum lorem, a luctus libero',
+ 'sum': Decimal('135.79'),
+ 'recipient': 'Joe Bloggs',
+ 'cashier_id': 10,
+ }),
+]
+
+# - add `report_date`
+# - set `report_date` to the current date if `reported` is True, None if False
+# - delete `reported`
+#
+# new_schema = {
+# 'date': DateObject,
+# 'report_date': DateObject,
+# 'purpose': String,
+# 'sum': DecimalObject,
+# 'recipient': String,
+# 'cashier_id': LongObject,
+# }
+
+
+class ExpenseVoucher(
+ metaclass=GenericObjectMeta,
+ schema=old_schema,
+):
+ pass
+
+
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+accounting = client.get_or_create_cache('accounting')
+
+for key, value in old_data:
+ accounting.put(key, ExpenseVoucher(**value))
+
+data_classes = client.query_binary_type('ExpenseVoucher')
+print(data_classes)
+# {
+# -231598180: <class '__main__.ExpenseVoucher'>
+# }
+
+s_id, data_class = data_classes.popitem()
+schema = data_class.schema
+
+schema['expense_date'] = schema['date']
+del schema['date']
+schema['report_date'] = DateObject
+del schema['reported']
+schema['sum'] = DecimalObject
+
+
+# define new data class
+class ExpenseVoucherV2(
+ metaclass=GenericObjectMeta,
+ type_name='ExpenseVoucher',
+ schema=schema,
+):
+ pass
+
+
+def migrate(cache, data, new_class):
+ """ Migrate given data pages. """
+ for key, old_value in data:
+ # read data
+ print(old_value)
+ # ExpenseVoucher(
+ # date=datetime(2017, 9, 21, 0, 0),
+ # reported=True,
+ # purpose='Praesent eget fermentum massa',
+ # sum=Decimal('666.67'),
+ # recipient='John Doe',
+ # cashier_id=8,
+ # version=1
+ # )
+
+ # create new binary object
+ new_value = new_class()
+
+ # process data
+ new_value.sum = old_value.sum
+ new_value.purpose = old_value.purpose
+ new_value.recipient = old_value.recipient
+ new_value.cashier_id = old_value.cashier_id
+ new_value.expense_date = old_value.date
+ new_value.report_date = date.today() if old_value.reported else None
+
+ # replace data
+ cache.put(key, new_value)
+
+ # verify data
+ verify = cache.get(key)
+ print(verify)
+ # ExpenseVoucherV2(
+ # purpose='Praesent eget fermentum massa',
+ # sum=Decimal('666.67'),
+ # recipient='John Doe',
+ # cashier_id=8,
+ # expense_date=datetime(2017, 9, 21, 0, 0),
+ # report_date=datetime(2018, 8, 29, 0, 0),
+ # version=1,
+ # )
+
+
+# migrate data
+result = accounting.scan()
+migrate(accounting, result, ExpenseVoucherV2)
+
+# cleanup
+accounting.destroy()
+client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/read_binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/read_binary.py b/modules/platforms/python/examples/read_binary.py
new file mode 100644
index 0000000..3a8e9e2
--- /dev/null
+++ b/modules/platforms/python/examples/read_binary.py
@@ -0,0 +1,275 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from decimal import Decimal
+
+from pyignite import Client
+from pyignite.datatypes.prop_codes import *
+
+
+COUNTRY_TABLE_NAME = 'Country'
+CITY_TABLE_NAME = 'City'
+LANGUAGE_TABLE_NAME = 'CountryLanguage'
+
+COUNTRY_CREATE_TABLE_QUERY = '''CREATE TABLE Country (
+ Code CHAR(3) PRIMARY KEY,
+ Name CHAR(52),
+ Continent CHAR(50),
+ Region CHAR(26),
+ SurfaceArea DECIMAL(10,2),
+ IndepYear SMALLINT(6),
+ Population INT(11),
+ LifeExpectancy DECIMAL(3,1),
+ GNP DECIMAL(10,2),
+ GNPOld DECIMAL(10,2),
+ LocalName CHAR(45),
+ GovernmentForm CHAR(45),
+ HeadOfState CHAR(60),
+ Capital INT(11),
+ Code2 CHAR(2)
+)'''
+
+COUNTRY_INSERT_QUERY = '''INSERT INTO Country(
+ Code, Name, Continent, Region,
+ SurfaceArea, IndepYear, Population,
+ LifeExpectancy, GNP, GNPOld,
+ LocalName, GovernmentForm, HeadOfState,
+ Capital, Code2
+) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+
+CITY_CREATE_TABLE_QUERY = '''CREATE TABLE City (
+ ID INT(11),
+ Name CHAR(35),
+ CountryCode CHAR(3),
+ District CHAR(20),
+ Population INT(11),
+ PRIMARY KEY (ID, CountryCode)
+) WITH "affinityKey=CountryCode"'''
+
+CITY_CREATE_INDEX = '''
+CREATE INDEX idx_country_code ON city (CountryCode)'''
+
+CITY_INSERT_QUERY = '''INSERT INTO City(
+ ID, Name, CountryCode, District, Population
+) VALUES (?, ?, ?, ?, ?)'''
+
+LANGUAGE_CREATE_TABLE_QUERY = '''CREATE TABLE CountryLanguage (
+ CountryCode CHAR(3),
+ Language CHAR(30),
+ IsOfficial BOOLEAN,
+ Percentage DECIMAL(4,1),
+ PRIMARY KEY (CountryCode, Language)
+) WITH "affinityKey=CountryCode"'''
+
+LANGUAGE_CREATE_INDEX = '''
+CREATE INDEX idx_lang_country_code ON CountryLanguage (CountryCode)'''
+
+LANGUAGE_INSERT_QUERY = '''INSERT INTO CountryLanguage(
+ CountryCode, Language, IsOfficial, Percentage
+) VALUES (?, ?, ?, ?)'''
+
+DROP_TABLE_QUERY = '''DROP TABLE {} IF EXISTS'''
+
+COUNTRY_DATA = [
+ [
+ 'USA', 'United States', 'North America', 'North America',
+ Decimal('9363520.00'), 1776, 278357000,
+ Decimal('77.1'), Decimal('8510700.00'), Decimal('8110900.00'),
+ 'United States', 'Federal Republic', 'George W. Bush',
+ 3813, 'US',
+ ],
+ [
+ 'IND', 'India', 'Asia', 'Southern and Central Asia',
+ Decimal('3287263.00'), 1947, 1013662000,
+ Decimal('62.5'), Decimal('447114.00'), Decimal('430572.00'),
+ 'Bharat/India', 'Federal Republic', 'Kocheril Raman Narayanan',
+ 1109, 'IN',
+ ],
+ [
+ 'CHN', 'China', 'Asia', 'Eastern Asia',
+ Decimal('9572900.00'), -1523, 1277558000,
+ Decimal('71.4'), Decimal('982268.00'), Decimal('917719.00'),
+ 'Zhongquo', 'PeoplesRepublic', 'Jiang Zemin',
+ 1891, 'CN',
+ ],
+]
+
+CITY_DATA = [
+ [3793, 'New York', 'USA', 'New York', 8008278],
+ [3794, 'Los Angeles', 'USA', 'California', 3694820],
+ [3795, 'Chicago', 'USA', 'Illinois', 2896016],
+ [3796, 'Houston', 'USA', 'Texas', 1953631],
+ [3797, 'Philadelphia', 'USA', 'Pennsylvania', 1517550],
+ [3798, 'Phoenix', 'USA', 'Arizona', 1321045],
+ [3799, 'San Diego', 'USA', 'California', 1223400],
+ [3800, 'Dallas', 'USA', 'Texas', 1188580],
+ [3801, 'San Antonio', 'USA', 'Texas', 1144646],
+ [3802, 'Detroit', 'USA', 'Michigan', 951270],
+ [3803, 'San Jose', 'USA', 'California', 894943],
+ [3804, 'Indianapolis', 'USA', 'Indiana', 791926],
+ [3805, 'San Francisco', 'USA', 'California', 776733],
+ [1024, 'Mumbai (Bombay)', 'IND', 'Maharashtra', 10500000],
+ [1025, 'Delhi', 'IND', 'Delhi', 7206704],
+ [1026, 'Calcutta [Kolkata]', 'IND', 'West Bengali', 4399819],
+ [1027, 'Chennai (Madras)', 'IND', 'Tamil Nadu', 3841396],
+ [1028, 'Hyderabad', 'IND', 'Andhra Pradesh', 2964638],
+ [1029, 'Ahmedabad', 'IND', 'Gujarat', 2876710],
+ [1030, 'Bangalore', 'IND', 'Karnataka', 2660088],
+ [1031, 'Kanpur', 'IND', 'Uttar Pradesh', 1874409],
+ [1032, 'Nagpur', 'IND', 'Maharashtra', 1624752],
+ [1033, 'Lucknow', 'IND', 'Uttar Pradesh', 1619115],
+ [1034, 'Pune', 'IND', 'Maharashtra', 1566651],
+ [1035, 'Surat', 'IND', 'Gujarat', 1498817],
+ [1036, 'Jaipur', 'IND', 'Rajasthan', 1458483],
+ [1890, 'Shanghai', 'CHN', 'Shanghai', 9696300],
+ [1891, 'Peking', 'CHN', 'Peking', 7472000],
+ [1892, 'Chongqing', 'CHN', 'Chongqing', 6351600],
+ [1893, 'Tianjin', 'CHN', 'Tianjin', 5286800],
+ [1894, 'Wuhan', 'CHN', 'Hubei', 4344600],
+ [1895, 'Harbin', 'CHN', 'Heilongjiang', 4289800],
+ [1896, 'Shenyang', 'CHN', 'Liaoning', 4265200],
+ [1897, 'Kanton [Guangzhou]', 'CHN', 'Guangdong', 4256300],
+ [1898, 'Chengdu', 'CHN', 'Sichuan', 3361500],
+ [1899, 'Nanking [Nanjing]', 'CHN', 'Jiangsu', 2870300],
+ [1900, 'Changchun', 'CHN', 'Jilin', 2812000],
+ [1901, 'Xi´an', 'CHN', 'Shaanxi', 2761400],
+ [1902, 'Dalian', 'CHN', 'Liaoning', 2697000],
+ [1903, 'Qingdao', 'CHN', 'Shandong', 2596000],
+ [1904, 'Jinan', 'CHN', 'Shandong', 2278100],
+ [1905, 'Hangzhou', 'CHN', 'Zhejiang', 2190500],
+ [1906, 'Zhengzhou', 'CHN', 'Henan', 2107200],
+]
+
+LANGUAGE_DATA = [
+ ['USA', 'Chinese', False, Decimal('0.6')],
+ ['USA', 'English', True, Decimal('86.2')],
+ ['USA', 'French', False, Decimal('0.7')],
+ ['USA', 'German', False, Decimal('0.7')],
+ ['USA', 'Italian', False, Decimal('0.6')],
+ ['USA', 'Japanese', False, Decimal('0.2')],
+ ['USA', 'Korean', False, Decimal('0.3')],
+ ['USA', 'Polish', False, Decimal('0.3')],
+ ['USA', 'Portuguese', False, Decimal('0.2')],
+ ['USA', 'Spanish', False, Decimal('7.5')],
+ ['USA', 'Tagalog', False, Decimal('0.4')],
+ ['USA', 'Vietnamese', False, Decimal('0.2')],
+ ['IND', 'Asami', False, Decimal('1.5')],
+ ['IND', 'Bengali', False, Decimal('8.2')],
+ ['IND', 'Gujarati', False, Decimal('4.8')],
+ ['IND', 'Hindi', True, Decimal('39.9')],
+ ['IND', 'Kannada', False, Decimal('3.9')],
+ ['IND', 'Malajalam', False, Decimal('3.6')],
+ ['IND', 'Marathi', False, Decimal('7.4')],
+ ['IND', 'Orija', False, Decimal('3.3')],
+ ['IND', 'Punjabi', False, Decimal('2.8')],
+ ['IND', 'Tamil', False, Decimal('6.3')],
+ ['IND', 'Telugu', False, Decimal('7.8')],
+ ['IND', 'Urdu', False, Decimal('5.1')],
+ ['CHN', 'Chinese', True, Decimal('92.0')],
+ ['CHN', 'Dong', False, Decimal('0.2')],
+ ['CHN', 'Hui', False, Decimal('0.8')],
+ ['CHN', 'Mantšu', False, Decimal('0.9')],
+ ['CHN', 'Miao', False, Decimal('0.7')],
+ ['CHN', 'Mongolian', False, Decimal('0.4')],
+ ['CHN', 'Puyi', False, Decimal('0.2')],
+ ['CHN', 'Tibetan', False, Decimal('0.4')],
+ ['CHN', 'Tujia', False, Decimal('0.5')],
+ ['CHN', 'Uighur', False, Decimal('0.6')],
+ ['CHN', 'Yi', False, Decimal('0.6')],
+ ['CHN', 'Zhuang', False, Decimal('1.4')],
+]
+
+
+# establish connection
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+# create tables
+for query in [
+ COUNTRY_CREATE_TABLE_QUERY,
+ CITY_CREATE_TABLE_QUERY,
+ LANGUAGE_CREATE_TABLE_QUERY,
+]:
+ client.sql(query)
+
+# create indices
+for query in [CITY_CREATE_INDEX, LANGUAGE_CREATE_INDEX]:
+ client.sql(query)
+
+# load data
+for row in COUNTRY_DATA:
+ client.sql(COUNTRY_INSERT_QUERY, query_args=row)
+
+for row in CITY_DATA:
+ client.sql(CITY_INSERT_QUERY, query_args=row)
+
+for row in LANGUAGE_DATA:
+ client.sql(LANGUAGE_INSERT_QUERY, query_args=row)
+
+# examine the storage: each SQL table is exposed as a key-value cache
+# named 'SQL_PUBLIC_<TABLE>' (see the printed cache names below)
+result = client.get_cache_names()
+print(result)
+# [
+# 'SQL_PUBLIC_CITY',
+# 'SQL_PUBLIC_COUNTRY',
+# 'PUBLIC',
+# 'SQL_PUBLIC_COUNTRYLANGUAGE'
+# ]
+
+city_cache = client.get_or_create_cache('SQL_PUBLIC_CITY')
+print(city_cache.settings[PROP_NAME])
+# 'SQL_PUBLIC_CITY'
+
+print(city_cache.settings[PROP_QUERY_ENTITIES])
+# {
+# 'key_type_name': (
+# 'SQL_PUBLIC_CITY_9ac8e17a_2f99_45b7_958e_06da32882e9d_KEY'
+# ),
+# 'value_type_name': (
+# 'SQL_PUBLIC_CITY_9ac8e17a_2f99_45b7_958e_06da32882e9d'
+# ),
+# 'table_name': 'CITY',
+# 'query_fields': [
+# ...
+# ],
+# 'field_name_aliases': [
+# ...
+# ],
+# 'query_indexes': []
+# }
+
+# scanning the cache yields (key object, value object) pairs of the
+# auto-generated binary types shown above
+result = city_cache.scan()
+print(next(result))
+# (
+# SQL_PUBLIC_CITY_6fe650e1_700f_4e74_867d_58f52f433c43_KEY(
+# ID=1890,
+# COUNTRYCODE='CHN',
+# version=1
+# ),
+# SQL_PUBLIC_CITY_6fe650e1_700f_4e74_867d_58f52f433c43(
+# NAME='Shanghai',
+# DISTRICT='Shanghai',
+# POPULATION=9696300,
+# version=1
+# )
+# )
+
+# clean up
+for table_name in [
+ CITY_TABLE_NAME,
+ LANGUAGE_TABLE_NAME,
+ COUNTRY_TABLE_NAME,
+]:
+ result = client.sql(DROP_TABLE_QUERY.format(table_name))
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/readme.md
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/readme.md b/modules/platforms/python/examples/readme.md
new file mode 100644
index 0000000..3628c82
--- /dev/null
+++ b/modules/platforms/python/examples/readme.md
@@ -0,0 +1,17 @@
+# Examples
+
+This directory contains the following example files:
+
+- `binary_basics.py` − basic operations with Complex objects,
+- `binary_types.py` - read SQL table as a key-value cache,
+- `create_binary.py` − create SQL row with key-value operation,
+- `failover.py` − fail-over connection to Ignite cluster,
+- `get_and_put.py` − basic key-value operations,
+- `migrate_binary.py` − work with Complex object schemas,
+- `scans.py` − cache scan operation,
+- `sql.py` − use Ignite SQL,
+- `type_hints.py` − type hints.
+
+For the explanation of the examples please refer to the
+[Examples of usage](https://apache-ignite-binary-protocol-client.readthedocs.io/en/latest/examples.html)
+section of the `pyignite` documentation.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/scans.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/scans.py b/modules/platforms/python/examples/scans.py
new file mode 100644
index 0000000..d5f2b48
--- /dev/null
+++ b/modules/platforms/python/examples/scans.py
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite import Client
+
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+my_cache = client.create_cache('my cache')
+
+my_cache.put_all({'key_{}'.format(v): v for v in range(20)})
+# {
+# 'key_0': 0,
+# 'key_1': 1,
+# 'key_2': 2,
+# ... 20 elements in total...
+# 'key_18': 18,
+# 'key_19': 19
+# }
+
+# scan() returns a generator of (key, value) pairs; entries come back
+# in no particular order (see the sample output below)
+result = my_cache.scan()
+for k, v in result:
+ print(k, v)
+# 'key_17' 17
+# 'key_10' 10
+# 'key_6' 6,
+# ... 20 elements in total...
+# 'key_16' 16
+# 'key_12' 12
+
+# the cursor is exhausted after iteration, so a fresh scan is needed
+result = my_cache.scan()
+print(dict(result))
+# {
+# 'key_17': 17,
+# 'key_10': 10,
+# 'key_6': 6,
+# ... 20 elements in total...
+# 'key_16': 16,
+# 'key_12': 12
+# }
+
+my_cache.destroy()
+client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/sql.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/sql.py b/modules/platforms/python/examples/sql.py
new file mode 100644
index 0000000..8f0ee7c
--- /dev/null
+++ b/modules/platforms/python/examples/sql.py
@@ -0,0 +1,298 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from decimal import Decimal
+
+from pyignite import Client
+
+
+COUNTRY_TABLE_NAME = 'Country'
+CITY_TABLE_NAME = 'City'
+LANGUAGE_TABLE_NAME = 'CountryLanguage'
+
+COUNTRY_CREATE_TABLE_QUERY = '''CREATE TABLE Country (
+ Code CHAR(3) PRIMARY KEY,
+ Name CHAR(52),
+ Continent CHAR(50),
+ Region CHAR(26),
+ SurfaceArea DECIMAL(10,2),
+ IndepYear SMALLINT(6),
+ Population INT(11),
+ LifeExpectancy DECIMAL(3,1),
+ GNP DECIMAL(10,2),
+ GNPOld DECIMAL(10,2),
+ LocalName CHAR(45),
+ GovernmentForm CHAR(45),
+ HeadOfState CHAR(60),
+ Capital INT(11),
+ Code2 CHAR(2)
+)'''
+
+COUNTRY_INSERT_QUERY = '''INSERT INTO Country(
+ Code, Name, Continent, Region,
+ SurfaceArea, IndepYear, Population,
+ LifeExpectancy, GNP, GNPOld,
+ LocalName, GovernmentForm, HeadOfState,
+ Capital, Code2
+) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+
+CITY_CREATE_TABLE_QUERY = '''CREATE TABLE City (
+ ID INT(11),
+ Name CHAR(35),
+ CountryCode CHAR(3),
+ District CHAR(20),
+ Population INT(11),
+ PRIMARY KEY (ID, CountryCode)
+) WITH "affinityKey=CountryCode"'''
+
+CITY_CREATE_INDEX = '''
+CREATE INDEX idx_country_code ON city (CountryCode)'''
+
+CITY_INSERT_QUERY = '''INSERT INTO City(
+ ID, Name, CountryCode, District, Population
+) VALUES (?, ?, ?, ?, ?)'''
+
+LANGUAGE_CREATE_TABLE_QUERY = '''CREATE TABLE CountryLanguage (
+ CountryCode CHAR(3),
+ Language CHAR(30),
+ IsOfficial BOOLEAN,
+ Percentage DECIMAL(4,1),
+ PRIMARY KEY (CountryCode, Language)
+) WITH "affinityKey=CountryCode"'''
+
+LANGUAGE_CREATE_INDEX = '''
+CREATE INDEX idx_lang_country_code ON CountryLanguage (CountryCode)'''
+
+LANGUAGE_INSERT_QUERY = '''INSERT INTO CountryLanguage(
+ CountryCode, Language, IsOfficial, Percentage
+) VALUES (?, ?, ?, ?)'''
+
+DROP_TABLE_QUERY = '''DROP TABLE {} IF EXISTS'''
+
+COUNTRY_DATA = [
+ [
+ 'USA', 'United States', 'North America', 'North America',
+ Decimal('9363520.00'), 1776, 278357000,
+ Decimal('77.1'), Decimal('8510700.00'), Decimal('8110900.00'),
+ 'United States', 'Federal Republic', 'George W. Bush',
+ 3813, 'US',
+ ],
+ [
+ 'IND', 'India', 'Asia', 'Southern and Central Asia',
+ Decimal('3287263.00'), 1947, 1013662000,
+ Decimal('62.5'), Decimal('447114.00'), Decimal('430572.00'),
+ 'Bharat/India', 'Federal Republic', 'Kocheril Raman Narayanan',
+ 1109, 'IN',
+ ],
+ [
+ 'CHN', 'China', 'Asia', 'Eastern Asia',
+ Decimal('9572900.00'), -1523, 1277558000,
+ Decimal('71.4'), Decimal('982268.00'), Decimal('917719.00'),
+ 'Zhongquo', 'PeoplesRepublic', 'Jiang Zemin',
+ 1891, 'CN',
+ ],
+]
+
+CITY_DATA = [
+ [3793, 'New York', 'USA', 'New York', 8008278],
+ [3794, 'Los Angeles', 'USA', 'California', 3694820],
+ [3795, 'Chicago', 'USA', 'Illinois', 2896016],
+ [3796, 'Houston', 'USA', 'Texas', 1953631],
+ [3797, 'Philadelphia', 'USA', 'Pennsylvania', 1517550],
+ [3798, 'Phoenix', 'USA', 'Arizona', 1321045],
+ [3799, 'San Diego', 'USA', 'California', 1223400],
+ [3800, 'Dallas', 'USA', 'Texas', 1188580],
+ [3801, 'San Antonio', 'USA', 'Texas', 1144646],
+ [3802, 'Detroit', 'USA', 'Michigan', 951270],
+ [3803, 'San Jose', 'USA', 'California', 894943],
+ [3804, 'Indianapolis', 'USA', 'Indiana', 791926],
+ [3805, 'San Francisco', 'USA', 'California', 776733],
+ [1024, 'Mumbai (Bombay)', 'IND', 'Maharashtra', 10500000],
+ [1025, 'Delhi', 'IND', 'Delhi', 7206704],
+ [1026, 'Calcutta [Kolkata]', 'IND', 'West Bengali', 4399819],
+ [1027, 'Chennai (Madras)', 'IND', 'Tamil Nadu', 3841396],
+ [1028, 'Hyderabad', 'IND', 'Andhra Pradesh', 2964638],
+ [1029, 'Ahmedabad', 'IND', 'Gujarat', 2876710],
+ [1030, 'Bangalore', 'IND', 'Karnataka', 2660088],
+ [1031, 'Kanpur', 'IND', 'Uttar Pradesh', 1874409],
+ [1032, 'Nagpur', 'IND', 'Maharashtra', 1624752],
+ [1033, 'Lucknow', 'IND', 'Uttar Pradesh', 1619115],
+ [1034, 'Pune', 'IND', 'Maharashtra', 1566651],
+ [1035, 'Surat', 'IND', 'Gujarat', 1498817],
+ [1036, 'Jaipur', 'IND', 'Rajasthan', 1458483],
+ [1890, 'Shanghai', 'CHN', 'Shanghai', 9696300],
+ [1891, 'Peking', 'CHN', 'Peking', 7472000],
+ [1892, 'Chongqing', 'CHN', 'Chongqing', 6351600],
+ [1893, 'Tianjin', 'CHN', 'Tianjin', 5286800],
+ [1894, 'Wuhan', 'CHN', 'Hubei', 4344600],
+ [1895, 'Harbin', 'CHN', 'Heilongjiang', 4289800],
+ [1896, 'Shenyang', 'CHN', 'Liaoning', 4265200],
+ [1897, 'Kanton [Guangzhou]', 'CHN', 'Guangdong', 4256300],
+ [1898, 'Chengdu', 'CHN', 'Sichuan', 3361500],
+ [1899, 'Nanking [Nanjing]', 'CHN', 'Jiangsu', 2870300],
+ [1900, 'Changchun', 'CHN', 'Jilin', 2812000],
+ [1901, 'Xi´an', 'CHN', 'Shaanxi', 2761400],
+ [1902, 'Dalian', 'CHN', 'Liaoning', 2697000],
+ [1903, 'Qingdao', 'CHN', 'Shandong', 2596000],
+ [1904, 'Jinan', 'CHN', 'Shandong', 2278100],
+ [1905, 'Hangzhou', 'CHN', 'Zhejiang', 2190500],
+ [1906, 'Zhengzhou', 'CHN', 'Henan', 2107200],
+]
+
+LANGUAGE_DATA = [
+ ['USA', 'Chinese', False, Decimal('0.6')],
+ ['USA', 'English', True, Decimal('86.2')],
+ ['USA', 'French', False, Decimal('0.7')],
+ ['USA', 'German', False, Decimal('0.7')],
+ ['USA', 'Italian', False, Decimal('0.6')],
+ ['USA', 'Japanese', False, Decimal('0.2')],
+ ['USA', 'Korean', False, Decimal('0.3')],
+ ['USA', 'Polish', False, Decimal('0.3')],
+ ['USA', 'Portuguese', False, Decimal('0.2')],
+ ['USA', 'Spanish', False, Decimal('7.5')],
+ ['USA', 'Tagalog', False, Decimal('0.4')],
+ ['USA', 'Vietnamese', False, Decimal('0.2')],
+ ['IND', 'Asami', False, Decimal('1.5')],
+ ['IND', 'Bengali', False, Decimal('8.2')],
+ ['IND', 'Gujarati', False, Decimal('4.8')],
+ ['IND', 'Hindi', True, Decimal('39.9')],
+ ['IND', 'Kannada', False, Decimal('3.9')],
+ ['IND', 'Malajalam', False, Decimal('3.6')],
+ ['IND', 'Marathi', False, Decimal('7.4')],
+ ['IND', 'Orija', False, Decimal('3.3')],
+ ['IND', 'Punjabi', False, Decimal('2.8')],
+ ['IND', 'Tamil', False, Decimal('6.3')],
+ ['IND', 'Telugu', False, Decimal('7.8')],
+ ['IND', 'Urdu', False, Decimal('5.1')],
+ ['CHN', 'Chinese', True, Decimal('92.0')],
+ ['CHN', 'Dong', False, Decimal('0.2')],
+ ['CHN', 'Hui', False, Decimal('0.8')],
+ ['CHN', 'Mantšu', False, Decimal('0.9')],
+ ['CHN', 'Miao', False, Decimal('0.7')],
+ ['CHN', 'Mongolian', False, Decimal('0.4')],
+ ['CHN', 'Puyi', False, Decimal('0.2')],
+ ['CHN', 'Tibetan', False, Decimal('0.4')],
+ ['CHN', 'Tujia', False, Decimal('0.5')],
+ ['CHN', 'Uighur', False, Decimal('0.6')],
+ ['CHN', 'Yi', False, Decimal('0.6')],
+ ['CHN', 'Zhuang', False, Decimal('1.4')],
+]
+
+
+# establish connection
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+# create tables
+for query in [
+ COUNTRY_CREATE_TABLE_QUERY,
+ CITY_CREATE_TABLE_QUERY,
+ LANGUAGE_CREATE_TABLE_QUERY,
+]:
+ client.sql(query)
+
+# create indices
+for query in [CITY_CREATE_INDEX, LANGUAGE_CREATE_INDEX]:
+ client.sql(query)
+
+# load data
+for row in COUNTRY_DATA:
+ client.sql(COUNTRY_INSERT_QUERY, query_args=row)
+
+for row in CITY_DATA:
+ client.sql(CITY_INSERT_QUERY, query_args=row)
+
+for row in LANGUAGE_DATA:
+ client.sql(LANGUAGE_INSERT_QUERY, query_args=row)
+
+# 10 most populated cities (with pagination)
+MOST_POPULATED_QUERY = '''
+SELECT name, population FROM City ORDER BY population DESC LIMIT 10'''
+
+# sql() returns a cursor that yields one row (a list of values) at a time
+result = client.sql(MOST_POPULATED_QUERY)
+print('Most 10 populated cities:')
+for row in result:
+ print(row)
+# Most 10 populated cities:
+# ['Mumbai (Bombay)', 10500000]
+# ['Shanghai', 9696300]
+# ['New York', 8008278]
+# ['Peking', 7472000]
+# ['Delhi', 7206704]
+# ['Chongqing', 6351600]
+# ['Tianjin', 5286800]
+# ['Calcutta [Kolkata]', 4399819]
+# ['Wuhan', 4344600]
+# ['Harbin', 4289800]
+
+# 10 most populated cities in 3 countries (with pagination and header row)
+MOST_POPULATED_IN_3_COUNTRIES_QUERY = '''
+SELECT country.name as country_name, city.name as city_name, MAX(city.population) AS max_pop FROM country
+ JOIN city ON city.countrycode = country.code
+ WHERE country.code IN ('USA','IND','CHN')
+ GROUP BY country.name, city.name ORDER BY max_pop DESC LIMIT 10
+'''
+
+# with include_field_names=True the first yielded row is the column
+# header (see the printed output below)
+result = client.sql(
+ MOST_POPULATED_IN_3_COUNTRIES_QUERY,
+ include_field_names=True,
+)
+print('Most 10 populated cities in USA, India and China:')
+print(next(result))
+print('----------------------------------------')
+for row in result:
+ print(row)
+# Most 10 populated cities in USA, India and China:
+# ['COUNTRY_NAME', 'CITY_NAME', 'MAX_POP']
+# ----------------------------------------
+# ['India', 'Mumbai (Bombay)', 10500000]
+# ['China', 'Shanghai', 9696300]
+# ['United States', 'New York', 8008278]
+# ['China', 'Peking', 7472000]
+# ['India', 'Delhi', 7206704]
+# ['China', 'Chongqing', 6351600]
+# ['China', 'Tianjin', 5286800]
+# ['India', 'Calcutta [Kolkata]', 4399819]
+# ['China', 'Wuhan', 4344600]
+# ['China', 'Harbin', 4289800]
+
+# show city info
+CITY_INFO_QUERY = '''SELECT * FROM City WHERE id = ?'''
+
+result = client.sql(
+ CITY_INFO_QUERY,
+ query_args=[3802],
+ include_field_names=True,
+)
+field_names = next(result)
+field_data = list(*result)
+
+print('City info:')
+for field_name, field_value in zip(field_names*len(field_data), field_data):
+ print('{}: {}'.format(field_name, field_value))
+# City info:
+# ID: 3802
+# NAME: Detroit
+# COUNTRYCODE: USA
+# DISTRICT: Michigan
+# POPULATION: 951270
+
+# clean up
+for table_name in [
+ CITY_TABLE_NAME,
+ LANGUAGE_TABLE_NAME,
+ COUNTRY_TABLE_NAME,
+]:
+ result = client.sql(DROP_TABLE_QUERY.format(table_name))
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/examples/type_hints.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/examples/type_hints.py b/modules/platforms/python/examples/type_hints.py
new file mode 100644
index 0000000..4cc44c0
--- /dev/null
+++ b/modules/platforms/python/examples/type_hints.py
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite import Client
+from pyignite.datatypes import CharObject, ShortObject
+
+client = Client()
+client.connect('127.0.0.1', 10800)
+
+my_cache = client.get_or_create_cache('my cache')
+
+# type hints let the client pick a narrower wire type than the default
+# Python-to-Ignite mapping would choose
+my_cache.put('my key', 42)
+# value ‘42’ takes 9 bytes of memory as a LongObject
+
+my_cache.put('my key', 42, value_hint=ShortObject)
+# value ‘42’ takes only 3 bytes as a ShortObject
+
+my_cache.put('a', 1)
+# ‘a’ is a key of type String
+
+my_cache.put('a', 2, key_hint=CharObject)
+# another key ‘a’ of type CharObject was created
+
+# keys of different wire types are distinct entries, even with the same
+# Python value (see the two gets below)
+value = my_cache.get('a')
+print(value)
+# 1
+
+value = my_cache.get('a', key_hint=CharObject)
+print(value)
+# 2
+
+# now let us delete both keys at once
+my_cache.remove_keys([
+ 'a', # a default type key
+ ('a', CharObject), # a key of type CharObject
+])
+
+my_cache.destroy()
+client.close()
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/__init__.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/__init__.py b/modules/platforms/python/pyignite/__init__.py
new file mode 100644
index 0000000..0ac346f
--- /dev/null
+++ b/modules/platforms/python/pyignite/__init__.py
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.client import Client
+from pyignite.binary import GenericObjectMeta
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/__init__.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/__init__.py b/modules/platforms/python/pyignite/api/__init__.py
new file mode 100644
index 0000000..01437f0
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/__init__.py
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains functions, that are (more or less) directly mapped to
+Apache Ignite binary protocol operations. Read more:
+
+https://apacheignite.readme.io/docs/binary-client-protocol#section-client-operations
+
+When the binary client protocol changes, these functions also change. For
+stable end user API see :mod:`pyignite.client` module.
+"""
+
+from .cache_config import (
+ cache_create,
+ cache_get_names,
+ cache_get_or_create,
+ cache_destroy,
+ cache_get_configuration,
+ cache_create_with_config,
+ cache_get_or_create_with_config,
+)
+from .key_value import (
+ cache_get,
+ cache_put,
+ cache_get_all,
+ cache_put_all,
+ cache_contains_key,
+ cache_contains_keys,
+ cache_get_and_put,
+ cache_get_and_replace,
+ cache_get_and_remove,
+ cache_put_if_absent,
+ cache_get_and_put_if_absent,
+ cache_replace,
+ cache_replace_if_equals,
+ cache_clear,
+ cache_clear_key,
+ cache_clear_keys,
+ cache_remove_key,
+ cache_remove_if_equals,
+ cache_remove_keys,
+ cache_remove_all,
+ cache_get_size,
+)
+from .sql import (
+ scan,
+ scan_cursor_get_page,
+ sql,
+ sql_cursor_get_page,
+ sql_fields,
+ sql_fields_cursor_get_page,
+ resource_close,
+)
+from .binary import (
+ get_binary_type,
+ put_binary_type,
+)
+from .result import APIResult
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/api/binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/api/binary.py b/modules/platforms/python/pyignite/api/binary.py
new file mode 100644
index 0000000..f0a5831
--- /dev/null
+++ b/modules/platforms/python/pyignite/api/binary.py
@@ -0,0 +1,209 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Union
+
+from pyignite.constants import *
+from pyignite.datatypes.binary import (
+ body_struct, enum_struct, schema_struct, binary_fields_struct,
+)
+from pyignite.datatypes import String, Int, Bool
+from pyignite.queries import Query, Response
+from pyignite.queries.op_codes import *
+from pyignite.utils import int_overflow, entity_id
+from .result import APIResult
+
+
def get_binary_type(
        connection: 'Connection', binary_type: Union[str, int], query_id=None,
) -> APIResult:
    """
    Gets the binary type information by type ID.

    :param connection: connection to Ignite server,
    :param binary_type: binary type name or ID,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """

    query_struct = Query(
        OP_GET_BINARY_TYPE,
        [
            ('type_id', Int),
        ],
        query_id=query_id,
    )

    # Send the request manually (instead of `query_struct.perform()`),
    # because the response layout is variable and must be parsed
    # incrementally depending on what the server reports.
    _, send_buffer = query_struct.from_python({
        'type_id': entity_id(binary_type),
    })
    connection.send(send_buffer)

    # the fixed response head carries only the `type_exists` flag
    response_head_struct = Response([
        ('type_exists', Bool),
    ])
    response_head_type, recv_buffer = response_head_struct.parse(connection)
    response_head = response_head_type.from_buffer_copy(recv_buffer)
    response_parts = []
    if response_head.type_exists:
        # body, optional enum data and the schema follow only when the
        # requested type is registered on the server
        resp_body_type, resp_body_buffer = body_struct.parse(connection)
        response_parts.append(('body', resp_body_type))
        resp_body = resp_body_type.from_buffer_copy(resp_body_buffer)
        recv_buffer += resp_body_buffer
        if resp_body.is_enum:
            resp_enum, resp_enum_buffer = enum_struct.parse(connection)
            response_parts.append(('enums', resp_enum))
            recv_buffer += resp_enum_buffer
        resp_schema_type, resp_schema_buffer = schema_struct.parse(connection)
        response_parts.append(('schema', resp_schema_type))
        recv_buffer += resp_schema_buffer

    # assemble a ctypes class covering the whole response, then deserialize
    # the accumulated bytes in one pass
    response_class = type(
        'GetBinaryTypeResponse',
        (response_head_type,),
        {
            '_pack_': 1,
            '_fields_': response_parts,
        }
    )
    response = response_class.from_buffer_copy(recv_buffer)
    result = APIResult(response)
    if result.status != 0:
        return result
    result.value = {
        'type_exists': response.type_exists
    }
    if hasattr(response, 'body'):
        result.value.update(body_struct.to_python(response.body))
    if hasattr(response, 'enums'):
        result.value['enums'] = enum_struct.to_python(response.enums)
    if hasattr(response, 'schema'):
        # map each schema ID to a flat list of its field IDs
        result.value['schema'] = {
            x['schema_id']: [
                z['schema_field_id'] for z in x['schema_fields']
            ]
            for x in schema_struct.to_python(response.schema)
        }
    return result
+
+
def put_binary_type(
        connection: 'Connection', type_name: str, affinity_key_field: str=None,
        is_enum=False, schema: dict=None, query_id=None,
) -> APIResult:
    """
    Registers binary type information in cluster.

    :param connection: connection to Ignite server,
    :param type_name: name of the data type being registered,
    :param affinity_key_field: (optional) name of the affinity key field,
    :param is_enum: (optional) register enum if True, binary object otherwise.
     Defaults to False,
    :param schema: (optional) when register enum, pass a dict of enumerated
     parameter names as keys and an integers as values. When register binary
     type, pass a dict of field names: field types. Binary type with no fields
     is OK,
    :param query_id: (optional) a value generated by client and returned as-is
     in response.query_id. When the parameter is omitted, a random value
     is generated,
    :return: API result data object.
    """
    # prepare data
    if schema is None:
        schema = {}
    type_id = entity_id(type_name)
    data = {
        'type_name': type_name,
        'type_id': type_id,
        'affinity_key_field': affinity_key_field,
        'binary_fields': [],
        'is_enum': is_enum,
        'schema': [],
    }
    schema_id = None
    if is_enum:
        data['enums'] = [
            {'literal': literal, 'type_id': ordinal}
            for literal, ordinal in schema.items()
        ]
    else:
        # assemble schema and calculate schema ID in one go;
        # the schema ID is an FNV-1 hash over the bytes of all field IDs
        schema_id = FNV1_OFFSET_BASIS if schema else 0
        for field_name, data_type in schema.items():
            # TODO: check for allowed data types
            field_id = entity_id(field_name)
            data['binary_fields'].append({
                'field_name': field_name,
                'type_id': int.from_bytes(
                    data_type.type_code,
                    byteorder=PROTOCOL_BYTE_ORDER
                ),
                'field_id': field_id,
            })
            # FNV-1: fold the field ID in byte by byte, lowest byte first
            for shift in (0, 8, 16, 24):
                schema_id ^= (field_id >> shift) & 0xff
                schema_id = int_overflow(schema_id * FNV1_PRIME)

    data['schema'].append({
        'schema_id': schema_id,
        'schema_fields': [
            {'schema_field_id': entity_id(x)} for x in schema
        ],
    })

    # do query; the enum and non-enum request layouts differ only by the
    # optional 'enums' part, so build a single field list conditionally
    # instead of duplicating the whole Query construction
    query_fields = [
        ('type_id', Int),
        ('type_name', String),
        ('affinity_key_field', String),
        ('binary_fields', binary_fields_struct),
        ('is_enum', Bool),
    ]
    if is_enum:
        query_fields.append(('enums', enum_struct))
    query_fields.append(('schema', schema_struct))
    query_struct = Query(OP_PUT_BINARY_TYPE, query_fields, query_id=query_id)

    result = query_struct.perform(connection, query_params=data)
    if result.status == 0:
        result.value = {
            'type_id': type_id,
            'schema_id': schema_id,
        }
    return result
[2/6] ignite git commit: IGNITE-7782 Python thin client
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/complex.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/complex.py b/modules/platforms/python/pyignite/datatypes/complex.py
new file mode 100644
index 0000000..9a5664c
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/complex.py
@@ -0,0 +1,531 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+import ctypes
+import inspect
+
+from pyignite.constants import *
+from pyignite.exceptions import ParseError
+from pyignite.utils import entity_id, hashcode, is_hinted
+from .internal import AnyDataObject
+from .type_codes import *
+
+
# public parser/generator classes exported by this module
__all__ = [
    'Map', 'ObjectArrayObject', 'CollectionObject', 'MapObject',
    'WrappedDataObject', 'BinaryObject',
]
+
+
class ObjectArrayObject:
    """
    Array of objects of any type. Its Python representation is
    tuple(type_id, iterable of any type).
    """
    type_code = TC_OBJECT_ARRAY
    # name of the header field holding the second tuple element;
    # subclasses may override it (see CollectionObject)
    type_or_id_name = 'type_id'

    @classmethod
    def build_header(cls):
        """Build the ctypes structure of the array header."""
        header_fields = [
            ('type_code', ctypes.c_byte),
            ('type_id', ctypes.c_int),
            ('length', ctypes.c_int),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': header_fields},
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the header, then each element, off the client stream."""
        header_class = cls.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        element_fields = []
        for index in range(header.length):
            element_class, fragment = AnyDataObject.parse(client)
            buffer += fragment
            element_fields.append(
                ('element_{}'.format(index), element_class)
            )

        parsed_class = type(
            cls.__name__,
            (header_class,),
            {'_pack_': 1, '_fields_': element_fields},
        )
        return parsed_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Convert the parsed array into (type_or_id, list of elements)."""
        elements = [
            AnyDataObject.to_python(
                getattr(ctype_object, 'element_{}'.format(index)),
                *args, **kwargs
            )
            for index in range(ctype_object.length)
        ]
        return getattr(ctype_object, cls.type_or_id_name), elements

    @classmethod
    def from_python(cls, value):
        """Serialize a (type_or_id, iterable) pair into bytes."""
        type_or_id, elements = value
        header = cls.build_header()()
        header.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        try:
            count = len(elements)
        except TypeError:
            # a scalar was passed; treat it as a one-element array
            elements, count = [elements], 1
        header.length = count
        setattr(header, cls.type_or_id_name, type_or_id)

        return bytes(header) + b''.join(
            AnyDataObject.from_python(item) for item in elements
        )
+
+
class WrappedDataObject:
    """
    One or more binary objects can be wrapped in an array. This allows reading,
    storing, passing and writing objects efficiently without understanding
    their contents, performing simple byte copy.

    Python representation: tuple(payload: bytes, offset: integer). Offset
    points to the root object of the array.
    """
    type_code = TC_ARRAY_WRAPPED_OBJECTS

    @classmethod
    def build_header(cls):
        """Build the ctypes structure of the wrapper header."""
        header_fields = [
            ('type_code', ctypes.c_byte),
            ('length', ctypes.c_int),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': header_fields},
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the header, then the opaque payload and root offset."""
        header_class = cls.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        payload_fields = [
            ('payload', ctypes.c_byte * header.length),
            ('offset', ctypes.c_int),
        ]
        parsed_class = type(
            cls.__name__,
            (header_class,),
            {'_pack_': 1, '_fields_': payload_fields},
        )
        remainder = ctypes.sizeof(parsed_class) - ctypes.sizeof(header_class)
        buffer += client.recv(remainder)
        return parsed_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Return the raw payload bytes and the root object offset."""
        return bytes(ctype_object.payload), ctype_object.offset

    @classmethod
    def from_python(cls, value):
        # wrapped data is parse-only; callers must send the unwrapped form
        raise ParseError('Send unwrapped data.')
+
+
class CollectionObject(ObjectArrayObject):
    """
    Just like object array, but contains deserialization type hint instead of
    type id. This hint is also useless in Python, because the list type alone
    covers all the use cases.

    Also represented as tuple(type_id, iterable of any type) in Python.
    """
    type_code = TC_COLLECTION
    # the second tuple element is a collection type hint, not a type ID
    type_or_id_name = 'type'
    pythonic = list
    default = []

    @classmethod
    def build_header(cls):
        # same layout as the parent's header, except that the four-byte
        # type ID is replaced by a one-byte collection type hint
        return type(
            cls.__name__+'Header',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('type_code', ctypes.c_byte),
                    ('length', ctypes.c_int),
                    ('type', ctypes.c_byte),
                ],
            }
        )
+
+
class Map:
    """
    Dictionary type, payload-only.

    Ignite does not track the order of key-value pairs in its caches, hence
    the ordinary Python dict type, not the collections.OrderedDict.
    """
    HASH_MAP = 1
    LINKED_HASH_MAP = 2

    @classmethod
    def build_header(cls):
        # payload-only form: just the pair counter, no type code
        header_fields = [('length', ctypes.c_int)]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': header_fields},
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the header, then length*2 data objects (keys and values)."""
        header_class = cls.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        # `length` counts pairs, but every key and every value is an
        # independent data object, hence the doubled element count
        element_fields = []
        for index in range(header.length << 1):
            element_class, fragment = AnyDataObject.parse(client)
            buffer += fragment
            element_fields.append(
                ('element_{}'.format(index), element_class)
            )

        parsed_class = type(
            cls.__name__,
            (header_class,),
            {'_pack_': 1, '_fields_': element_fields},
        )
        return parsed_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Rebuild a dict (or OrderedDict for linked hash maps)."""
        map_type = getattr(ctype_object, 'type', cls.HASH_MAP)
        result = OrderedDict() if map_type == cls.LINKED_HASH_MAP else {}

        for index in range(0, ctype_object.length << 1, 2):
            key_object = getattr(ctype_object, 'element_{}'.format(index))
            value_object = getattr(
                ctype_object, 'element_{}'.format(index + 1)
            )
            key = AnyDataObject.to_python(key_object, *args, **kwargs)
            result[key] = AnyDataObject.to_python(
                value_object, *args, **kwargs
            )
        return result

    @classmethod
    def from_python(cls, value, type_id=None):
        """Serialize a mapping; keys/values may carry explicit type hints."""
        header = cls.build_header()()
        header.length = len(value)
        # subclasses (MapObject) add type_code and type-hint header fields
        if hasattr(header, 'type_code'):
            header.type_code = int.from_bytes(
                cls.type_code,
                byteorder=PROTOCOL_BYTE_ORDER
            )
        if hasattr(header, 'type'):
            header.type = type_id
        buffer = bytes(header)

        for pair in value.items():
            # serialize the key, then the value, in order
            for element in pair:
                if is_hinted(element):
                    buffer += element[1].from_python(element[0])
                else:
                    buffer += AnyDataObject.from_python(element)
        return buffer
+
+
class MapObject(Map):
    """
    This is a dictionary type. Type conversion hint can be a `HASH_MAP`
    (ordinary dict) or `LINKED_HASH_MAP` (collections.OrderedDict).

    Keys and values in map are independent data objects, but `count`
    counts pairs. Very annoying.
    """
    type_code = TC_MAP
    pythonic = dict
    default = {}

    @classmethod
    def build_header(cls):
        # full object form: type code + pair counter + map type hint
        header_fields = [
            ('type_code', ctypes.c_byte),
            ('length', ctypes.c_int),
            ('type', ctypes.c_byte),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': header_fields},
        )

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Return (map type hint, dict) instead of the bare dict."""
        payload = super().to_python(ctype_object, *args, **kwargs)
        return ctype_object.type, payload

    @classmethod
    def from_python(cls, value):
        """Accept a (map type hint, mapping) pair and serialize it."""
        map_hint, mapping = value
        return super().from_python(mapping, map_hint)
+
+
class BinaryObject:
    """
    Complex (user-defined) Ignite binary object.

    Wire format: a fixed header (type code, version, flags, type ID, hash
    code, length, schema ID, schema offset), followed by the serialized
    fields, followed by an optional schema footer holding per-field offsets
    in either compact (offsets only) or full (field ID + offset) form.
    """
    type_code = TC_COMPLEX_OBJECT

    # header flag bits
    USER_TYPE = 0x0001
    HAS_SCHEMA = 0x0002
    HAS_RAW_DATA = 0x0004
    OFFSET_ONE_BYTE = 0x0008
    OFFSET_TWO_BYTES = 0x0010
    COMPACT_FOOTER = 0x0020

    @classmethod
    def build_header(cls):
        """Build the ctypes structure of the fixed binary object header."""
        return type(
            cls.__name__,
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('type_code', ctypes.c_byte),
                    ('version', ctypes.c_byte),
                    ('flags', ctypes.c_short),
                    ('type_id', ctypes.c_int),
                    ('hash_code', ctypes.c_int),
                    ('length', ctypes.c_int),
                    ('schema_id', ctypes.c_int),
                    ('schema_offset', ctypes.c_int),
                ],
            }
        )

    @classmethod
    def offset_c_type(cls, flags: int):
        """Pick the footer offset width (1, 2 or 4 bytes) from the flags."""
        if flags & cls.OFFSET_ONE_BYTE:
            return ctypes.c_ubyte
        if flags & cls.OFFSET_TWO_BYTES:
            return ctypes.c_uint16
        return ctypes.c_uint

    @classmethod
    def schema_type(cls, flags: int):
        """
        ctypes type of one schema footer element: a bare offset in compact
        form, or a (field_id, offset) pair in the full form.
        """
        if flags & cls.COMPACT_FOOTER:
            return cls.offset_c_type(flags)
        return type(
            'SchemaElement',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('field_id', ctypes.c_int),
                    ('offset', cls.offset_c_type(flags)),
                ],
            },
        )

    @staticmethod
    def get_dataclass(client: 'Client', header) -> OrderedDict:
        """
        Look up the registered dataclass for the header's
        (type_id, schema_id) pair.

        Uses a cloned connection so the lookup does not disturb the
        response currently being read from `client`.

        :raise ParseError: when the binary type is not registered.
        """
        # get field names from outer space
        temp_conn = client.clone()
        result = temp_conn.query_binary_type(header.type_id, header.schema_id)
        temp_conn.close()
        if not result:
            raise ParseError('Binary type is not registered')
        return result

    @classmethod
    def parse(cls, client: 'Client'):
        """Parse one binary object from the client stream."""
        from pyignite.datatypes import Struct

        header_class = cls.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        # ignore full schema, always retrieve fields' types and order
        # from complex types registry
        data_class = cls.get_dataclass(client, header)
        fields = data_class.schema.items()
        object_fields_struct = Struct(fields)
        object_fields, object_fields_buffer = object_fields_struct.parse(client)
        buffer += object_fields_buffer
        final_class_fields = [('object_fields', object_fields)]

        if header.flags & cls.HAS_SCHEMA:
            schema = cls.schema_type(header.flags) * len(fields)
            buffer += client.recv(ctypes.sizeof(schema))
            final_class_fields.append(('schema', schema))

        final_class = type(
            cls.__name__,
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': final_class_fields,
            }
        )
        # register schema encoding approach
        client.compact_footer = bool(header.flags & cls.COMPACT_FOOTER)
        return final_class, buffer

    @classmethod
    def to_python(cls, ctype_object, client: 'Client'=None, *args, **kwargs):
        """
        Convert a parsed binary object into an instance of its
        registered dataclass.

        :raise ParseError: when no client is given to query the type with.
        """
        if not client:
            raise ParseError(
                'Can not query binary type {}'.format(ctype_object.type_id)
            )

        data_class = client.query_binary_type(
            ctype_object.type_id,
            ctype_object.schema_id
        )
        result = data_class()

        result.version = ctype_object.version
        for field_name, field_type in data_class.schema.items():
            setattr(
                result, field_name, field_type.to_python(
                    getattr(ctype_object.object_fields, field_name),
                    client, *args, **kwargs
                )
            )
        return result

    @classmethod
    def from_python(cls, value: object):
        """Serialize a dataclass instance into binary object bytes."""

        def find_client():
            """
            A nice hack. Extracts the nearest `Client` instance from the
            call stack.
            """
            from pyignite import Client

            frame = None
            try:
                for rec in inspect.stack()[2:]:
                    frame = rec[0]
                    code = frame.f_code
                    for varname in code.co_varnames:
                        # `co_varnames` lists locals that may not be
                        # assigned yet; `.get()` avoids a KeyError on them
                        suspect = frame.f_locals.get(varname)
                        if isinstance(suspect, Client):
                            return suspect
            finally:
                # break the reference cycle frame object ↔ local variable
                del frame

        compact_footer = True  # this is actually used
        client = find_client()
        if client:
            # if no client can be found, the class of the `value` is discarded
            # and the new dataclass is automatically registered later on
            client.register_binary_type(value.__class__)
            compact_footer = client.compact_footer
        else:
            raise Warning(
                'Can not register binary type {}'.format(value.type_name)
            )

        # prepare header
        header_class = cls.build_header()
        header = header_class()
        header.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )

        header.flags = cls.USER_TYPE | cls.HAS_SCHEMA
        if compact_footer:
            header.flags |= cls.COMPACT_FOOTER
        header.version = value.version
        header.type_id = value.type_id
        header.schema_id = value.schema_id

        # create fields and calculate offsets
        field_buffer = b''
        offsets = [ctypes.sizeof(header_class)]
        schema_items = list(value.schema.items())
        for field_name, field_type in schema_items:
            partial_buffer = field_type.from_python(
                getattr(
                    value, field_name, getattr(field_type, 'default', None)
                )
            )
            # offsets grow monotonically, so the running maximum is the
            # last element (avoids re-scanning the list with max() for
            # every field, which was O(n**2) overall)
            offsets.append(offsets[-1] + len(partial_buffer))
            field_buffer += partial_buffer

        offsets = offsets[:-1]

        # create footer
        if max(offsets, default=0) < 255:
            header.flags |= cls.OFFSET_ONE_BYTE
        elif max(offsets) < 65535:
            header.flags |= cls.OFFSET_TWO_BYTES
        schema_class = cls.schema_type(header.flags) * len(offsets)
        schema = schema_class()
        if compact_footer:
            for i, offset in enumerate(offsets):
                schema[i] = offset
        else:
            for i, offset in enumerate(offsets):
                schema[i].field_id = entity_id(schema_items[i][0])
                schema[i].offset = offset
        # calculate size and hash code
        header.schema_offset = ctypes.sizeof(header_class) + len(field_buffer)
        header.length = header.schema_offset + ctypes.sizeof(schema_class)
        header.hash_code = hashcode(field_buffer + bytes(schema))

        return bytes(header) + field_buffer + bytes(schema)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/internal.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/internal.py b/modules/platforms/python/pyignite/datatypes/internal.py
new file mode 100644
index 0000000..a363a5f
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/internal.py
@@ -0,0 +1,461 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+import ctypes
+import decimal
+from datetime import date, datetime, timedelta
+from typing import Any, Tuple
+import uuid
+
+import attr
+
+from pyignite.constants import *
+from pyignite.exceptions import ParseError
+from pyignite.utils import is_binary, is_hinted, is_iterable
+from .type_codes import *
+
+
# public names exported by this internal-helpers module
__all__ = ['AnyDataArray', 'AnyDataObject', 'Struct', 'StructArray', 'tc_map']
+
+
def tc_map(key: bytes, _memo_map: dict={}):
    """
    Returns a default parser/generator class for the given type code.

    This mapping is used internally inside listed complex parser/generator
    classes, so it has to be a function. Local imports are used for the same
    reason.

    :param key: Ignite type code,
    :param _memo_map: do not use this parameter, it is for memoization
     of the “type code-type class” mapping,
    :return: parser/generator class for the type code.
    """
    if not _memo_map:
        from pyignite.datatypes import (
            Null, ByteObject, ShortObject, IntObject, LongObject, FloatObject,
            DoubleObject, CharObject, BoolObject, UUIDObject, DateObject,
            TimestampObject, TimeObject, EnumObject, BinaryEnumObject,
            ByteArrayObject, ShortArrayObject, IntArrayObject, LongArrayObject,
            FloatArrayObject, DoubleArrayObject, CharArrayObject,
            BoolArrayObject,
            UUIDArrayObject, DateArrayObject, TimestampArrayObject,
            TimeArrayObject, EnumArrayObject, String, StringArrayObject,
            DecimalObject, DecimalArrayObject, ObjectArrayObject,
            CollectionObject,
            MapObject, BinaryObject, WrappedDataObject,
        )

        # Mutate the (deliberately mutable) default dict in place. The
        # previous `_memo_map = {...}` assignment rebound the local name
        # only, leaving the default argument empty and rebuilding the whole
        # mapping on every call — i.e. the memoization never took effect.
        _memo_map.update({
            TC_NULL: Null,

            TC_BYTE: ByteObject,
            TC_SHORT: ShortObject,
            TC_INT: IntObject,
            TC_LONG: LongObject,
            TC_FLOAT: FloatObject,
            TC_DOUBLE: DoubleObject,
            TC_CHAR: CharObject,
            TC_BOOL: BoolObject,

            TC_UUID: UUIDObject,
            TC_DATE: DateObject,
            TC_TIMESTAMP: TimestampObject,
            TC_TIME: TimeObject,
            TC_ENUM: EnumObject,
            TC_BINARY_ENUM: BinaryEnumObject,

            TC_BYTE_ARRAY: ByteArrayObject,
            TC_SHORT_ARRAY: ShortArrayObject,
            TC_INT_ARRAY: IntArrayObject,
            TC_LONG_ARRAY: LongArrayObject,
            TC_FLOAT_ARRAY: FloatArrayObject,
            TC_DOUBLE_ARRAY: DoubleArrayObject,
            TC_CHAR_ARRAY: CharArrayObject,
            TC_BOOL_ARRAY: BoolArrayObject,

            TC_UUID_ARRAY: UUIDArrayObject,
            TC_DATE_ARRAY: DateArrayObject,
            TC_TIMESTAMP_ARRAY: TimestampArrayObject,
            TC_TIME_ARRAY: TimeArrayObject,
            TC_ENUM_ARRAY: EnumArrayObject,

            TC_STRING: String,
            TC_STRING_ARRAY: StringArrayObject,
            TC_DECIMAL: DecimalObject,
            TC_DECIMAL_ARRAY: DecimalArrayObject,

            TC_OBJECT_ARRAY: ObjectArrayObject,
            TC_COLLECTION: CollectionObject,
            TC_MAP: MapObject,

            TC_COMPLEX_OBJECT: BinaryObject,
            TC_ARRAY_WRAPPED_OBJECTS: WrappedDataObject,
        })
    return _memo_map[key]
+
+
@attr.s
class StructArray:
    """ `counter_type` counter, followed by count*following structure. """
    following = attr.ib(type=list, factory=list)
    counter_type = attr.ib(default=ctypes.c_int)
    defaults = attr.ib(type=dict, default={})

    def build_header_class(self):
        """ctypes structure holding just the element counter."""
        return type(
            self.__class__.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [('length', self.counter_type)],
            },
        )

    def parse(self, client: 'Client'):
        """Read the counter, then that many `following` structures."""
        buffer = client.recv(ctypes.sizeof(self.counter_type))
        count = int.from_bytes(buffer, byteorder=PROTOCOL_BYTE_ORDER)

        element_fields = []
        for index in range(count):
            element_class, fragment = Struct(self.following).parse(client)
            buffer += fragment
            element_fields.append(
                ('element_{}'.format(index), element_class)
            )

        data_class = type(
            'StructArray',
            (self.build_header_class(),),
            {'_pack_': 1, '_fields_': element_fields},
        )
        return data_class, buffer

    def to_python(self, ctype_object, *args, **kwargs):
        """Convert the parsed array into a list of plain dicts."""
        count = getattr(ctype_object, 'length', 0)
        return [
            Struct(self.following, dict_type=dict).to_python(
                getattr(ctype_object, 'element_{}'.format(index)),
                *args, **kwargs
            )
            for index in range(count)
        ]

    def from_python(self, value):
        """Serialize a list of mappings, applying `defaults` to each."""
        header = self.build_header_class()()
        header.length = len(value)
        buffer = bytes(header)

        for item in value:
            for key, default in self.defaults.items():
                item.setdefault(key, default)
            for name, element_class in self.following:
                buffer += element_class.from_python(item[name])

        return buffer
+
+
@attr.s
class Struct:
    """ Sequence of fields, including variable-sized and nested. """
    fields = attr.ib(type=list)
    dict_type = attr.ib(default=OrderedDict)
    defaults = attr.ib(type=dict, default={})

    def parse(self, client: 'Client') -> Tuple[type, bytes]:
        """Parse every field in order; return (ctypes class, raw bytes)."""
        buffer = b''
        parsed_fields = []

        for name, field in self.fields:
            parsed_class, fragment = field.parse(client)
            buffer += fragment
            parsed_fields.append((name, parsed_class))

        data_class = type(
            'Struct',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': parsed_fields},
        )
        return data_class, buffer

    def to_python(self, ctype_object, *args, **kwargs) -> Any:
        """Convert a parsed object into a `dict_type` of field values."""
        result = self.dict_type()
        for name, field in self.fields:
            attribute = getattr(ctype_object, name)
            result[name] = field.to_python(attribute, *args, **kwargs)
        return result

    def from_python(self, value) -> bytes:
        """Serialize a mapping of field values, applying `defaults` first."""
        for key, default in self.defaults.items():
            value.setdefault(key, default)

        return b''.join(
            field.from_python(value[name]) for name, field in self.fields
        )
+
+
class AnyDataObject:
    """
    Not an actual Ignite type, but contains a guesswork
    on serializing Python data or parsing an unknown Ignite data object.
    """
    # lazily-built Python type → Ignite type class maps,
    # populated on first use by the _init_python_* methods below
    _python_map = None
    _python_array_map = None

    @staticmethod
    def get_subtype(iterable, allow_none=False):
        # Deduce the common element type of an iterable; falls through
        # (returning None) when elements are of mixed non-nullable types.
        # arrays of these types can contain Null objects
        object_array_python_types = [
            str,
            datetime,
            timedelta,
            decimal.Decimal,
            uuid.UUID,
        ]

        # skip leading None elements to find the first typed one
        iterator = iter(iterable)
        type_first = type(None)
        try:
            while isinstance(None, type_first):
                type_first = type(next(iterator))
        except StopIteration:
            raise TypeError(
                'Can not represent an empty iterable '
                'or an iterable of `NoneType` in Ignite type.'
            )

        if type_first in object_array_python_types:
            allow_none = True

        # if an iterable contains items of more than one non-nullable type,
        # return None
        if all([
            isinstance(x, type_first)
            or ((x is None) and allow_none) for x in iterator
        ]):
            return type_first

    @classmethod
    def parse(cls, client: 'Client'):
        # peek the one-byte type code, then delegate to the matching class;
        # the code byte is pushed back via `prefetch` so the delegate can
        # re-read a complete object
        type_code = client.recv(ctypes.sizeof(ctypes.c_byte))
        try:
            data_class = tc_map(type_code)
        except KeyError:
            raise ParseError('Unknown type code: `{}`'.format(type_code))
        client.prefetch += type_code
        return data_class.parse(client)

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        # recover the type code byte from the parsed header and dispatch
        type_code = ctype_object.type_code.to_bytes(
            ctypes.sizeof(ctypes.c_byte),
            byteorder=PROTOCOL_BYTE_ORDER
        )
        data_class = tc_map(type_code)
        return data_class.to_python(ctype_object)

    @classmethod
    def _init_python_map(cls):
        """
        Optimizes Python types→Ignite types map creation for speed.

        Local imports seem inevitable here.
        """
        from pyignite.datatypes import (
            LongObject, DoubleObject, String, BoolObject, Null, UUIDObject,
            DateObject, TimeObject, DecimalObject,
        )

        cls._python_map = {
            int: LongObject,
            float: DoubleObject,
            str: String,
            bytes: String,
            bool: BoolObject,
            type(None): Null,
            uuid.UUID: UUIDObject,
            datetime: DateObject,
            date: DateObject,
            timedelta: TimeObject,
            decimal.Decimal: DecimalObject,
        }

    @classmethod
    def _init_python_array_map(cls):
        """
        Optimizes Python types→Ignite array types map creation for speed.
        """
        from pyignite.datatypes import (
            LongArrayObject, DoubleArrayObject, StringArrayObject,
            BoolArrayObject, UUIDArrayObject, DateArrayObject, TimeArrayObject,
            DecimalArrayObject,
        )

        cls._python_array_map = {
            int: LongArrayObject,
            float: DoubleArrayObject,
            str: StringArrayObject,
            bytes: StringArrayObject,
            bool: BoolArrayObject,
            uuid.UUID: UUIDArrayObject,
            datetime: DateArrayObject,
            date: DateArrayObject,
            timedelta: TimeArrayObject,
            decimal.Decimal: DecimalArrayObject,
        }

    @classmethod
    def map_python_type(cls, value):
        # Guess the Ignite type class for an arbitrary Python value.
        # Raises TypeError when no mapping can be found.
        from pyignite.datatypes import (
            MapObject, ObjectArrayObject, BinaryObject,
        )

        if cls._python_map is None:
            cls._init_python_map()
        if cls._python_array_map is None:
            cls._init_python_array_map()

        value_type = type(value)
        if is_iterable(value) and value_type is not str:
            value_subtype = cls.get_subtype(value)
            if value_subtype in cls._python_array_map:
                return cls._python_array_map[value_subtype]

            # a little heuristics (order may be important)
            if all([
                value_subtype is None,
                len(value) == 2,
                isinstance(value[0], int),
                isinstance(value[1], dict),
            ]):
                return MapObject

            if all([
                value_subtype is None,
                len(value) == 2,
                isinstance(value[0], int),
                is_iterable(value[1]),
            ]):
                return ObjectArrayObject

            raise TypeError(
                'Type `array of {}` is invalid'.format(value_subtype)
            )

        if is_binary(value):
            return BinaryObject

        if value_type in cls._python_map:
            return cls._python_map[value_type]
        raise TypeError(
            'Type `{}` is invalid.'.format(value_type)
        )

    @classmethod
    def from_python(cls, value):
        # guess the type, then let the guessed class do the serialization
        return cls.map_python_type(value).from_python(value)
+
+
@attr.s
class AnyDataArray(AnyDataObject):
    """
    Sequence of AnyDataObjects, payload-only.
    """
    counter_type = attr.ib(default=ctypes.c_int)

    def build_header(self):
        """ctypes structure holding just the element counter."""
        return type(
            self.__class__.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [('length', self.counter_type)],
            }
        )

    def parse(self, client: 'Client'):
        """Read the counter, then that many arbitrary data objects."""
        header_class = self.build_header()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        element_fields = []
        for index in range(header.length):
            element_class, fragment = super().parse(client)
            buffer += fragment
            element_fields.append(
                ('element_{}'.format(index), element_class)
            )

        parsed_class = type(
            self.__class__.__name__,
            (header_class,),
            {'_pack_': 1, '_fields_': element_fields},
        )
        return parsed_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Convert every element via the AnyDataObject guesswork."""
        result = []
        for index in range(ctype_object.length):
            element = getattr(ctype_object, 'element_{}'.format(index))
            result.append(super().to_python(element, *args, **kwargs))
        return result

    def from_python(self, value):
        """Serialize an iterable; elements may carry explicit type hints."""
        header = self.build_header()()

        try:
            count = len(value)
        except TypeError:
            # a scalar was passed; treat it as a one-element sequence
            value, count = [value], 1
        header.length = count
        buffer = bytes(header)

        for item in value:
            if is_hinted(item):
                buffer += item[1].from_python(item[0])
            else:
                buffer += super().from_python(item)
        return buffer
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/key_value.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/key_value.py b/modules/platforms/python/pyignite/datatypes/key_value.py
new file mode 100644
index 0000000..0f21ac6
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/key_value.py
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .primitive_arrays import ByteArray
+
+
class PeekModes(ByteArray):
    """
    Cache peek mode constants; transmitted as an array of bytes
    (hence the ByteArray base).
    """

    ALL = 0
    NEAR = 1
    PRIMARY = 2
    BACKUP = 3
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/null_object.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/null_object.py b/modules/platforms/python/pyignite/datatypes/null_object.py
new file mode 100644
index 0000000..9fa1e8f
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/null_object.py
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Null object.
+
+There can't be null type, because null payload takes exactly 0 bytes.
+"""
+
+import ctypes
+
+from .type_codes import TC_NULL
+
+
+__all__ = ['Null']
+
+
class Null:
    """
    Ignite null object: just the TC_NULL type code, zero-byte payload.
    """
    default = None
    pythonic = type(None)
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the one-byte ctypes structure."""
        if cls._object_c_type is None:
            fields = [('type_code', ctypes.c_byte)]
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {'_pack_': 1, '_fields_': fields},
            )
        return cls._object_c_type

    @classmethod
    def parse(cls, client: 'Client'):
        """Consume the single type-code byte from the socket."""
        received = client.recv(ctypes.sizeof(ctypes.c_byte))
        return cls.build_c_type(), received

    @staticmethod
    def to_python(*args, **kwargs):
        """A null object always deserializes to None."""
        return None

    @staticmethod
    def from_python(*args):
        """Serializing a null value emits only the type code byte."""
        return TC_NULL
+
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/primitive.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/primitive.py b/modules/platforms/python/pyignite/datatypes/primitive.py
new file mode 100644
index 0000000..73f096d
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/primitive.py
@@ -0,0 +1,105 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+
+from pyignite.constants import *
+
+
+__all__ = [
+ 'Primitive',
+ 'Byte', 'Short', 'Int', 'Long', 'Float', 'Double', 'Char', 'Bool',
+]
+
+
class Primitive:
    """
    Ignite primitive type. Base type for the following types:

    - Byte,
    - Short,
    - Int,
    - Long,
    - Float,
    - Double,
    - Char,
    - Bool.
    """

    # concrete subclasses set this to the matching ctypes type
    c_type = None

    @classmethod
    def parse(cls, client: 'Client'):
        """Read exactly one primitive value from the socket."""
        return cls.c_type, client.recv(ctypes.sizeof(cls.c_type))

    @staticmethod
    def to_python(ctype_object, *args, **kwargs):
        """Primitive values deserialize to themselves."""
        return ctype_object

    @classmethod
    def from_python(cls, value):
        """Serialize *value* as the little-endian bytes of `c_type`."""
        return bytes(cls.c_type(value))


class Byte(Primitive):
    c_type = ctypes.c_byte


class Short(Primitive):
    c_type = ctypes.c_short


class Int(Primitive):
    c_type = ctypes.c_int


class Long(Primitive):
    # c_longlong is guaranteed to be 8 bytes on every platform, whereas
    # c_long is only 4 bytes on LLP64 systems (e.g. 64-bit Windows) and
    # would corrupt the wire format of Ignite's 8-byte `long`.
    c_type = ctypes.c_longlong


class Float(Primitive):
    c_type = ctypes.c_float


class Double(Primitive):
    c_type = ctypes.c_double


class Char(Primitive):
    # characters travel as 2-byte UTF-16 code units
    c_type = ctypes.c_short

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Decode the 2-byte UTF-16 code unit into a Python string."""
        return ctype_object.value.to_bytes(
            ctypes.sizeof(cls.c_type),
            byteorder=PROTOCOL_BYTE_ORDER
        ).decode(PROTOCOL_CHAR_ENCODING)

    @classmethod
    def from_python(cls, value):
        """Encode a str, bytes or int character value into 2 bytes."""
        if type(value) is str:
            value = value.encode(PROTOCOL_CHAR_ENCODING)
        # assuming either a bytes or an integer
        if type(value) is bytes:
            value = int.from_bytes(value, byteorder=PROTOCOL_BYTE_ORDER)
        # assuming a valid integer
        return value.to_bytes(
            ctypes.sizeof(cls.c_type),
            byteorder=PROTOCOL_BYTE_ORDER
        )


class Bool(Primitive):
    c_type = ctypes.c_bool
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/primitive_arrays.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/primitive_arrays.py b/modules/platforms/python/pyignite/datatypes/primitive_arrays.py
new file mode 100644
index 0000000..83a2b4c
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/primitive_arrays.py
@@ -0,0 +1,207 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+
+from pyignite.constants import *
+from .primitive import *
+from .type_codes import *
+
+
+__all__ = [
+ 'ByteArray', 'ByteArrayObject', 'ShortArray', 'ShortArrayObject',
+ 'IntArray', 'IntArrayObject', 'LongArray', 'LongArrayObject',
+ 'FloatArray', 'FloatArrayObject', 'DoubleArray', 'DoubleArrayObject',
+ 'CharArray', 'CharArrayObject', 'BoolArray', 'BoolArrayObject',
+]
+
+
class PrimitiveArray:
    """
    Base class for array of primitives. Payload-only.
    """
    primitive_type = None
    type_code = None

    @classmethod
    def build_header_class(cls):
        """Create the ctypes header: a single 4-byte element counter."""
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [('length', ctypes.c_int)],
            },
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the counter, then the homogeneous data that follows it."""
        header_class = cls.build_header_class()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)
        final_class = type(
            cls.__name__,
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('data', cls.primitive_type.c_type * header.length),
                ],
            },
        )
        remainder = ctypes.sizeof(final_class) - ctypes.sizeof(header_class)
        buffer += client.recv(remainder)
        return final_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Convert the parsed structure into a plain list of values."""
        return [ctype_object.data[i] for i in range(ctype_object.length)]

    @classmethod
    def from_python(cls, value):
        """Serialize a sequence of primitive values into protocol bytes."""
        header_class = cls.build_header_class()
        header = header_class()
        if hasattr(header, 'type_code'):
            # subclasses with a type code in the header fill it in here
            header.type_code = int.from_bytes(
                cls.type_code,
                byteorder=PROTOCOL_BYTE_ORDER,
            )
        header.length = len(value)
        chunks = [bytes(header)]
        chunks.extend(cls.primitive_type.from_python(x) for x in value)
        return b''.join(chunks)
+
+
# Concrete payload-only array types: each pairs a primitive element type
# with the corresponding Ignite type code.

class ByteArray(PrimitiveArray):
    primitive_type = Byte
    type_code = TC_BYTE_ARRAY


class ShortArray(PrimitiveArray):
    primitive_type = Short
    type_code = TC_SHORT_ARRAY


class IntArray(PrimitiveArray):
    primitive_type = Int
    type_code = TC_INT_ARRAY


class LongArray(PrimitiveArray):
    primitive_type = Long
    type_code = TC_LONG_ARRAY


class FloatArray(PrimitiveArray):
    primitive_type = Float
    type_code = TC_FLOAT_ARRAY


class DoubleArray(PrimitiveArray):
    primitive_type = Double
    type_code = TC_DOUBLE_ARRAY


class CharArray(PrimitiveArray):
    primitive_type = Char
    type_code = TC_CHAR_ARRAY


class BoolArray(PrimitiveArray):
    primitive_type = Bool
    type_code = TC_BOOL_ARRAY
+
+
class PrimitiveArrayObject(PrimitiveArray):
    """
    Base class for primitive array object. Type code plus payload.
    """
    pythonic = list
    default = []

    @classmethod
    def build_header_class(cls):
        """Header: a 1-byte type code followed by a 4-byte counter."""
        header_fields = [
            ('type_code', ctypes.c_byte),
            ('length', ctypes.c_int),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': header_fields},
        )
+
+
# Array *objects*: same payload as the payload-only arrays, but prefixed
# with a type code (see PrimitiveArrayObject.build_header_class).

class ByteArrayObject(PrimitiveArrayObject):
    primitive_type = Byte
    type_code = TC_BYTE_ARRAY


class ShortArrayObject(PrimitiveArrayObject):
    primitive_type = Short
    type_code = TC_SHORT_ARRAY


class IntArrayObject(PrimitiveArrayObject):
    primitive_type = Int
    type_code = TC_INT_ARRAY


class LongArrayObject(PrimitiveArrayObject):
    primitive_type = Long
    type_code = TC_LONG_ARRAY


class FloatArrayObject(PrimitiveArrayObject):
    primitive_type = Float
    type_code = TC_FLOAT_ARRAY


class DoubleArrayObject(PrimitiveArrayObject):
    primitive_type = Double
    type_code = TC_DOUBLE_ARRAY


class CharArrayObject(PrimitiveArrayObject):
    primitive_type = Char
    type_code = TC_CHAR_ARRAY

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Decode each 2-byte UTF-16 code unit into a Python string."""
        values = super().to_python(ctype_object, *args, **kwargs)
        return [
            v.to_bytes(
                ctypes.sizeof(cls.primitive_type.c_type),
                byteorder=PROTOCOL_BYTE_ORDER
            ).decode(
                PROTOCOL_CHAR_ENCODING
            ) for v in values
        ]


class BoolArrayObject(PrimitiveArrayObject):
    primitive_type = Bool
    type_code = TC_BOOL_ARRAY
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/primitive_objects.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/primitive_objects.py b/modules/platforms/python/pyignite/datatypes/primitive_objects.py
new file mode 100644
index 0000000..53b54b3
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/primitive_objects.py
@@ -0,0 +1,157 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+
+from pyignite.constants import *
+from .type_codes import *
+
+
+__all__ = [
+ 'DataObject', 'ByteObject', 'ShortObject', 'IntObject', 'LongObject',
+ 'FloatObject', 'DoubleObject', 'CharObject', 'BoolObject',
+]
+
+
class DataObject:
    """
    Base class for primitive data objects.

    Primitive data objects are built of primitive data prepended by
    the corresponding type code.
    """

    c_type = None
    type_code = None
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the type_code + value structure."""
        if cls._object_c_type is None:
            struct_fields = [
                ('type_code', ctypes.c_byte),
                ('value', cls.c_type),
            ]
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {'_pack_': 1, '_fields_': struct_fields},
            )
        return cls._object_c_type

    @classmethod
    def parse(cls, client: 'Client'):
        """Read one full data object (type code plus payload)."""
        data_type = cls.build_c_type()
        received = client.recv(ctypes.sizeof(data_type))
        return data_type, received

    @staticmethod
    def to_python(ctype_object, *args, **kwargs):
        """Extract the payload; ctypes already yields a Python scalar."""
        return ctype_object.value

    @classmethod
    def from_python(cls, value):
        """Serialize *value* with its type code prepended."""
        data_object = cls.build_c_type()()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER,
        )
        data_object.value = value
        return bytes(data_object)
+
+
class ByteObject(DataObject):
    c_type = ctypes.c_byte
    type_code = TC_BYTE
    pythonic = int
    default = 0


class ShortObject(DataObject):
    c_type = ctypes.c_short
    type_code = TC_SHORT
    pythonic = int
    default = 0


class IntObject(DataObject):
    c_type = ctypes.c_int
    type_code = TC_INT
    pythonic = int
    default = 0


class LongObject(DataObject):
    # c_longlong is 8 bytes on every platform; c_long shrinks to 4 bytes
    # on LLP64 systems (64-bit Windows), which would break the 8-byte
    # Ignite `long` wire format.
    c_type = ctypes.c_longlong
    type_code = TC_LONG
    pythonic = int
    default = 0


class FloatObject(DataObject):
    c_type = ctypes.c_float
    type_code = TC_FLOAT
    pythonic = float
    default = 0.0


class DoubleObject(DataObject):
    c_type = ctypes.c_double
    type_code = TC_DOUBLE
    pythonic = float
    default = 0.0
+
+
class CharObject(DataObject):
    """
    This type is a little tricky. It stores character values in
    UTF-16 Little-endian encoding. We have to encode/decode it
    to/from UTF-8 to keep the coding hassle to minimum. Bear in mind
    though: decoded character may take 1..4 bytes in UTF-8.
    """
    c_type = ctypes.c_short
    type_code = TC_CHAR
    pythonic = str
    default = ' '

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Decode the 2-byte UTF-16 code unit into a Python string."""
        return ctype_object.value.to_bytes(
            ctypes.sizeof(cls.c_type),
            byteorder=PROTOCOL_BYTE_ORDER
        ).decode(PROTOCOL_CHAR_ENCODING)

    @classmethod
    def from_python(cls, value):
        """Serialize a str, bytes or int character value, type code first."""
        if type(value) is str:
            value = value.encode(PROTOCOL_CHAR_ENCODING)
        # assuming either a bytes or an integer
        if type(value) is bytes:
            value = int.from_bytes(value, byteorder=PROTOCOL_BYTE_ORDER)
        # assuming a valid integer
        return cls.type_code + value.to_bytes(
            ctypes.sizeof(cls.c_type),
            byteorder=PROTOCOL_BYTE_ORDER
        )


class BoolObject(DataObject):
    c_type = ctypes.c_bool
    type_code = TC_BOOL
    pythonic = bool
    default = False
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/prop_codes.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/prop_codes.py b/modules/platforms/python/pyignite/datatypes/prop_codes.py
new file mode 100644
index 0000000..adea281
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/prop_codes.py
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Cache configuration property codes.

# general cache properties
PROP_NAME = 0
PROP_CACHE_MODE = 1
PROP_CACHE_ATOMICITY_MODE = 2
PROP_BACKUPS_NUMBER = 3
PROP_WRITE_SYNCHRONIZATION_MODE = 4
PROP_COPY_ON_READ = 5
PROP_READ_FROM_BACKUP = 6

# memory/storage properties
PROP_DATA_REGION_NAME = 100
PROP_IS_ONHEAP_CACHE_ENABLED = 101

# SQL query properties
PROP_QUERY_ENTITIES = 200
PROP_QUERY_PARALLELISM = 201
PROP_QUERY_DETAIL_METRIC_SIZE = 202
PROP_SQL_SCHEMA = 203
PROP_SQL_INDEX_INLINE_MAX_SIZE = 204
PROP_SQL_ESCAPE_ALL = 205
PROP_MAX_QUERY_ITERATORS = 206

# rebalance properties
PROP_REBALANCE_MODE = 300
PROP_REBALANCE_DELAY = 301
PROP_REBALANCE_TIMEOUT = 302
PROP_REBALANCE_BATCH_SIZE = 303
PROP_REBALANCE_BATCHES_PREFETCH_COUNT = 304
PROP_REBALANCE_ORDER = 305
PROP_REBALANCE_THROTTLE = 306

# miscellaneous properties
PROP_GROUP_NAME = 400
PROP_CACHE_KEY_CONFIGURATION = 401
PROP_DEFAULT_LOCK_TIMEOUT = 402
PROP_MAX_CONCURRENT_ASYNC_OPERATIONS = 403
PROP_PARTITION_LOSS_POLICY = 404
PROP_EAGER_TTL = 405
PROP_STATISTICS_ENABLED = 406

# special marker value
PROP_INVALIDATE = -1
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/sql.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/sql.py b/modules/platforms/python/pyignite/datatypes/sql.py
new file mode 100644
index 0000000..533349e
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/sql.py
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .primitive import Byte
+
+
class StatementType(Byte):
    """
    SQL statement type constants, transmitted as a single byte
    (hence the Byte base).
    """

    ANY = 0
    SELECT = 1
    UPDATE = 2
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/standard.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/standard.py b/modules/platforms/python/pyignite/datatypes/standard.py
new file mode 100644
index 0000000..160e06e
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/standard.py
@@ -0,0 +1,713 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+from datetime import date, datetime, time, timedelta
+import decimal
+import uuid
+
+from pyignite.constants import *
+from .type_codes import *
+from .null_object import Null
+
+
+__all__ = [
+ 'String', 'DecimalObject', 'UUIDObject', 'TimestampObject', 'DateObject',
+ 'TimeObject',
+
+ 'StringArray', 'DecimalArray', 'UUIDArray', 'TimestampArray', 'DateArray',
+ 'TimeArray',
+
+ 'StringArrayObject', 'DecimalArrayObject', 'UUIDArrayObject',
+ 'TimestampArrayObject', 'TimeArrayObject', 'DateArrayObject',
+
+ 'EnumObject', 'EnumArray', 'EnumArrayObject', 'BinaryEnumObject',
+ 'BinaryEnumArrayObject', 'ObjectArray',
+]
+
+
class StandardObject:
    """
    Base class for fixed-size standard objects that may also arrive
    over the wire as a null object.
    """
    type_code = None

    @classmethod
    def build_c_type(cls):
        # concrete subclasses must provide their own ctypes structure
        raise NotImplementedError('This object is generic')

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the type code, dispatching to Null when TC_NULL is seen."""
        initial = client.recv(ctypes.sizeof(ctypes.c_byte))

        if initial == TC_NULL:
            return Null.build_c_type(), initial

        c_type = cls.build_c_type()
        rest = client.recv(ctypes.sizeof(c_type) - len(initial))
        return c_type, initial + rest
+
+
class String:
    """
    Pascal-style string: `c_int` counter, followed by count*bytes.
    UTF-8-encoded, so that one character may take 1 to 4 bytes.
    """
    type_code = TC_STRING
    pythonic = str

    @classmethod
    def build_c_type(cls, length: int):
        """Create a ctypes structure for a string of *length* bytes."""
        return type(
            cls.__name__,
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('type_code', ctypes.c_byte),
                    ('length', ctypes.c_int),
                    ('data', ctypes.c_char * length),
                ],
            },
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read a string (or a null object) from the socket."""
        first_byte = client.recv(ctypes.sizeof(ctypes.c_byte))
        # String or Null
        if first_byte == TC_NULL:
            return Null.build_c_type(), first_byte

        buffer = first_byte + client.recv(ctypes.sizeof(ctypes.c_int))
        length = int.from_bytes(buffer[1:], byteorder=PROTOCOL_BYTE_ORDER)

        data_type = cls.build_c_type(length)
        buffer += client.recv(ctypes.sizeof(data_type) - len(buffer))
        return data_type, buffer

    @staticmethod
    def to_python(ctype_object, *args, **kwargs):
        """Decode to str; None for a null object, '' for zero length."""
        length = getattr(ctype_object, 'length', None)
        if length is None:
            return None
        if length > 0:
            return ctype_object.data.decode(PROTOCOL_STRING_ENCODING)
        return ''

    @classmethod
    def from_python(cls, value):
        """Serialize a str (or bytes, or None as a null object)."""
        if value is None:
            return Null.from_python()

        if isinstance(value, str):
            value = value.encode(PROTOCOL_STRING_ENCODING)
        byte_count = len(value)
        data_object = cls.build_c_type(byte_count)()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER,
        )
        data_object.length = byte_count
        data_object.data = value
        return bytes(data_object)
+
+
class DecimalObject:
    """
    Arbitrary-precision decimal number.

    Wire format: type code, 4-byte scale, 4-byte length, then `length`
    ASCII digit bytes; the sign travels in the high bit of the first
    digit byte. Maps to `decimal.Decimal` in Python.
    """
    type_code = TC_DECIMAL
    pythonic = decimal.Decimal
    default = decimal.Decimal('0.00')

    @classmethod
    def build_c_header(cls):
        # header only; the variable-length digit data is appended later
        return type(
            cls.__name__,
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('type_code', ctypes.c_byte),
                    ('scale', ctypes.c_int),
                    ('length', ctypes.c_int),
                ],
            }
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """
        Read a decimal (or a null object) from the socket. Returns the
        assembled ctypes class and the raw bytes consumed.
        """
        tc_type = client.recv(ctypes.sizeof(ctypes.c_byte))
        # Decimal or Null
        if tc_type == TC_NULL:
            return Null.build_c_type(), tc_type

        header_class = cls.build_c_header()
        buffer = tc_type + client.recv(
            ctypes.sizeof(header_class)
            - len(tc_type)
        )
        header = header_class.from_buffer_copy(buffer)
        data_type = type(
            cls.__name__,
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('data', ctypes.c_char * header.length),
                ],
            }
        )
        buffer += client.recv(
            ctypes.sizeof(data_type)
            - ctypes.sizeof(header_class)
        )
        return data_type, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Convert the parsed structure to decimal.Decimal (or None)."""
        if getattr(ctype_object, 'length', None) is None:
            return None

        # the sign is carried in the top bit of the first digit byte
        sign = 1 if ctype_object.data[0] & 0x80 else 0
        data = bytes([ctype_object.data[0] & 0x7f]) + ctype_object.data[1:]
        result = decimal.Decimal(data.decode(PROTOCOL_STRING_ENCODING))
        # apply scale
        # NOTE(review): scale is applied as 10**scale, mirroring the
        # (possibly negative) exponent stored by from_python below; the
        # pair is self-consistent, but confirm it matches the server's
        # scale convention
        result = (
            result
            * decimal.Decimal('10') ** decimal.Decimal(ctype_object.scale)
        )
        if sign:
            # apply sign
            result = -result
        return result

    @classmethod
    def from_python(cls, value: decimal.Decimal):
        """Serialize a decimal.Decimal (None becomes a null object)."""
        if value is None:
            return Null.from_python()

        # as_tuple() yields digits plus a (possibly negative) exponent,
        # which is stored verbatim in the `scale` field
        sign, digits, scale = value.normalize().as_tuple()
        data = bytearray([ord('0') + digit for digit in digits])
        if sign:
            data[0] |= 0x80
        else:
            data[0] &= 0x7f
        length = len(digits)
        header_class = cls.build_c_header()
        data_class = type(
            cls.__name__,
            (header_class,),
            {
                '_pack_': 1,
                '_fields_': [
                    ('data', ctypes.c_char * length),
                ],
            }
        )
        data_object = data_class()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        data_object.length = length
        data_object.scale = scale
        data_object.data = bytes(data)
        return bytes(data_object)
+
+
class UUIDObject(StandardObject):
    """
    Universally unique identifier (UUID), aka Globally unique identifier
    (GUID). Payload takes up 16 bytes.
    """
    type_code = TC_UUID
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        # lazily create (and cache) the type_code + 16-byte value struct
        if cls._object_c_type is None:
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('type_code', ctypes.c_byte),
                        ('value', ctypes.c_byte * 16),
                    ],
                }
            )
        return cls._object_c_type

    @classmethod
    def from_python(cls, value: uuid.UUID):
        """Serialize a uuid.UUID with its type code prepended."""
        data_type = cls.build_c_type()
        data_object = data_type()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        # NOTE(review): bytes are copied in uuid.UUID.bytes order; confirm
        # this matches the server's expected UUID byte layout
        for i, byte in enumerate(bytearray(value.bytes)):
            data_object.value[i] = byte
        return bytes(data_object)

    @classmethod
    def to_python(cls, ctypes_object, *args, **kwargs):
        """Deserialize to uuid.UUID, or None for a null object."""
        if ctypes_object.type_code == int.from_bytes(
            TC_NULL,
            byteorder=PROTOCOL_BYTE_ORDER
        ):
            return None
        return uuid.UUID(bytes=bytes(ctypes_object.value))
+
+
class TimestampObject(StandardObject):
    """
    A signed integer number of milliseconds past 1 Jan 1970, aka Epoch
    (8 bytes long integer), plus the delta in nanoseconds (4 byte integer,
    only 0..999 range used).

    The accuracy is ridiculous. For instance, common HPETs have
    less than 10ms accuracy. Therefore no ns range calculations is made;
    `epoch` and `fraction` stored separately and represented as
    tuple(datetime.datetime, integer).
    """
    type_code = TC_TIMESTAMP
    pythonic = tuple
    default = (datetime(1970, 1, 1), 0)
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the ctypes structure."""
        if cls._object_c_type is None:
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('type_code', ctypes.c_byte),
                        # c_longlong guarantees the 8 bytes the docstring
                        # promises; c_long is only 4 bytes on LLP64
                        # platforms such as 64-bit Windows
                        ('epoch', ctypes.c_longlong),
                        ('fraction', ctypes.c_int),
                    ],
                }
            )
        return cls._object_c_type

    @classmethod
    def from_python(cls, value: tuple):
        """Serialize a (datetime, ns-fraction) tuple; None becomes null."""
        if value is None:
            return Null.from_python()
        data_type = cls.build_c_type()
        data_object = data_type()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        data_object.epoch = int(value[0].timestamp() * 1000)
        data_object.fraction = value[1]
        return bytes(data_object)

    @classmethod
    def to_python(cls, ctypes_object, *args, **kwargs):
        """Deserialize to (datetime, ns-fraction), or None for null."""
        if ctypes_object.type_code == int.from_bytes(
            TC_NULL,
            byteorder=PROTOCOL_BYTE_ORDER
        ):
            return None
        # NOTE(review): fromtimestamp() yields naive local time — confirm
        # this is the intended timezone handling
        return (
            datetime.fromtimestamp(ctypes_object.epoch/1000),
            ctypes_object.fraction
        )
+
+
class DateObject(StandardObject):
    """
    A signed integer number of milliseconds past 1 Jan 1970, aka Epoch
    (8 bytes long integer).

    Represented as a naive datetime.datetime in Python.
    """
    type_code = TC_DATE
    pythonic = datetime
    default = datetime(1970, 1, 1)
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the ctypes structure."""
        if cls._object_c_type is None:
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('type_code', ctypes.c_byte),
                        # c_longlong guarantees the 8 bytes the docstring
                        # promises; c_long is only 4 bytes on LLP64
                        # platforms such as 64-bit Windows
                        ('epoch', ctypes.c_longlong),
                    ],
                }
            )
        return cls._object_c_type

    @classmethod
    def from_python(cls, value: [date, datetime]):
        """Serialize a date or datetime; None becomes a null object."""
        if value is None:
            return Null.from_python()
        if type(value) is date:
            # promote a plain date to midnight of that day
            value = datetime.combine(value, time())
        data_type = cls.build_c_type()
        data_object = data_type()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        data_object.epoch = int(value.timestamp() * 1000)
        return bytes(data_object)

    @classmethod
    def to_python(cls, ctypes_object, *args, **kwargs):
        """Deserialize to a naive datetime, or None for a null object."""
        if ctypes_object.type_code == int.from_bytes(
            TC_NULL,
            byteorder=PROTOCOL_BYTE_ORDER
        ):
            return None
        return datetime.fromtimestamp(ctypes_object.epoch/1000)
+
+
class TimeObject(StandardObject):
    """
    Time of the day as a number of milliseconds since midnight.

    Represented as a datetime.timedelta in Python.
    """
    type_code = TC_TIME
    pythonic = timedelta
    default = timedelta()
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the ctypes structure."""
        if cls._object_c_type is None:
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('type_code', ctypes.c_byte),
                        # c_longlong is 8 bytes on every platform; c_long
                        # is only 4 bytes on LLP64 systems (64-bit
                        # Windows), breaking the wire format of the
                        # 8-byte value
                        ('value', ctypes.c_longlong),
                    ],
                }
            )
        return cls._object_c_type

    @classmethod
    def from_python(cls, value: timedelta):
        """Serialize a timedelta as milliseconds; None becomes null."""
        if value is None:
            return Null.from_python()
        data_type = cls.build_c_type()
        data_object = data_type()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        data_object.value = int(value.total_seconds() * 1000)
        return bytes(data_object)

    @classmethod
    def to_python(cls, ctypes_object, *args, **kwargs):
        """Deserialize to a timedelta, or None for a null object."""
        if ctypes_object.type_code == int.from_bytes(
            TC_NULL,
            byteorder=PROTOCOL_BYTE_ORDER
        ):
            return None
        return timedelta(milliseconds=ctypes_object.value)
+
+
class EnumObject(StandardObject):
    """
    Two integers used as the ID of the enumeration type, and its value.

    This type itself is useless in Python, but can be used for interoperability
    (using language-specific type serialization is a good way to kill the
    interoperability though), so it represented by tuple(int, int) in Python.
    """
    type_code = TC_ENUM
    _object_c_type = None

    @classmethod
    def build_c_type(cls):
        """Lazily create (and cache) the type_code+type_id+ordinal struct."""
        if cls._object_c_type is None:
            cls._object_c_type = type(
                cls.__name__,
                (ctypes.LittleEndianStructure,),
                {
                    '_pack_': 1,
                    '_fields_': [
                        ('type_code', ctypes.c_byte),
                        ('type_id', ctypes.c_int),
                        ('ordinal', ctypes.c_int),
                    ],
                }
            )
        return cls._object_c_type

    @classmethod
    def from_python(cls, value: tuple):
        """Serialize a (type_id, ordinal) pair; None becomes a null object."""
        if value is None:
            return Null.from_python()

        data_type = cls.build_c_type()
        data_object = data_type()
        data_object.type_code = int.from_bytes(
            cls.type_code,
            byteorder=PROTOCOL_BYTE_ORDER
        )
        # (the original repeated the `value is None` check here; it was
        # unreachable, since None is already handled above)
        data_object.type_id, data_object.ordinal = value
        return bytes(data_object)

    @classmethod
    def to_python(cls, ctypes_object, *args, **kwargs):
        """Deserialize to (type_id, ordinal), or None for a null object."""
        if ctypes_object.type_code == int.from_bytes(
            TC_NULL,
            byteorder=PROTOCOL_BYTE_ORDER
        ):
            return None
        return ctypes_object.type_id, ctypes_object.ordinal
+
+
class BinaryEnumObject(EnumObject):
    """
    Another way of representing the enum type. Same, but different:
    identical wire layout and Python representation as EnumObject,
    distinguished only by the TC_BINARY_ENUM type code.
    """
    type_code = TC_BINARY_ENUM
+
+
class StandardArray:
    """
    Base class for array of primitives. Payload-only.
    """
    standard_type = None
    type_code = None

    @classmethod
    def build_header_class(cls):
        """Create the ctypes header: a single 4-byte element counter."""
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {
                '_pack_': 1,
                '_fields_': [('length', ctypes.c_int)],
            },
        )

    @classmethod
    def parse(cls, client: 'Client'):
        """Read the counter, then parse each element via `standard_type`."""
        header_class = cls.build_header_class()
        buffer = client.recv(ctypes.sizeof(header_class))
        header = header_class.from_buffer_copy(buffer)

        element_fields = []
        for index in range(header.length):
            c_type, fragment = cls.standard_type.parse(client)
            buffer += fragment
            element_fields.append(('element_{}'.format(index), c_type))

        final_class = type(
            cls.__name__,
            (header_class,),
            {'_pack_': 1, '_fields_': element_fields},
        )
        return final_class, buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Convert the parsed structure into a list of Python values."""
        return [
            cls.standard_type.to_python(
                getattr(ctype_object, 'element_{}'.format(index)),
                *args, **kwargs
            )
            for index in range(ctype_object.length)
        ]

    @classmethod
    def from_python(cls, value):
        """Serialize a sequence of values into protocol bytes."""
        header_class = cls.build_header_class()
        header = header_class()
        if hasattr(header, 'type_code'):
            # subclasses with a type code in the header fill it in here
            header.type_code = int.from_bytes(
                cls.type_code,
                byteorder=PROTOCOL_BYTE_ORDER,
            )
        header.length = len(value)
        chunks = [bytes(header)]
        chunks.extend(cls.standard_type.from_python(x) for x in value)
        return b''.join(chunks)
+
+
class StringArray(StandardArray):
    """
    Array of Pascal-like strings. Payload-only, i.e. no `type_code` field
    in binary representation.

    List(str) in Python.
    """
    standard_type = String  # element codec
+
+
class DecimalArray(StandardArray):
    """ Payload-only array of decimal values. """
    standard_type = DecimalObject
+
+
class UUIDArray(StandardArray):
    """ Payload-only array of UUIDs. """
    standard_type = UUIDObject
+
+
class TimestampArray(StandardArray):
    """ Payload-only array of timestamp values. """
    standard_type = TimestampObject
+
+
class DateArray(StandardArray):
    """ Payload-only array of date values. """
    standard_type = DateObject
+
+
class TimeArray(StandardArray):
    """ Payload-only array of time values. """
    standard_type = TimeObject
+
+
class EnumArray(StandardArray):
    """ Payload-only array of (type_id, ordinal) enum pairs. """
    standard_type = EnumObject
+
+
class StandardArrayObject(StandardArray):
    """
    Base class for full-fledged array objects: like :class:`StandardArray`,
    but the generated header also carries the one-byte type code.
    """
    pythonic = list
    # NOTE(review): a mutable class-level default; apparently only ever
    # read by the framework, but confirm it is never mutated in place.
    default = []

    @classmethod
    def build_header_class(cls):
        """Return a ctypes structure: type code byte plus element count."""
        fields = [
            ('type_code', ctypes.c_byte),
            ('length', ctypes.c_int),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': fields},
        )
+
+
class StringArrayObject(StandardArrayObject):
    """ List of strings. Type code TC_STRING_ARRAY. """
    standard_type = String
    type_code = TC_STRING_ARRAY
+
+
class DecimalArrayObject(StandardArrayObject):
    """ List of decimal.Decimal objects. Type code TC_DECIMAL_ARRAY. """
    standard_type = DecimalObject
    type_code = TC_DECIMAL_ARRAY
+
+
class UUIDArrayObject(StandardArrayObject):
    """ Translated into Python as a list(uuid.UUID). Type code TC_UUID_ARRAY. """
    standard_type = UUIDObject
    type_code = TC_UUID_ARRAY
+
+
class TimestampArrayObject(StandardArrayObject):
    """
    Translated into Python as a list of (datetime.datetime, integer) tuples.
    Type code TC_TIMESTAMP_ARRAY.
    """
    standard_type = TimestampObject
    type_code = TC_TIMESTAMP_ARRAY
+
+
class DateArrayObject(StandardArrayObject):
    """ List of datetime.datetime type values. Type code TC_DATE_ARRAY. """
    standard_type = DateObject
    type_code = TC_DATE_ARRAY
+
+
class TimeArrayObject(StandardArrayObject):
    """ List of datetime.timedelta type values. Type code TC_TIME_ARRAY. """
    standard_type = TimeObject
    type_code = TC_TIME_ARRAY
+
+
class EnumArrayObject(StandardArrayObject):
    """
    Array of (int, int) tuples, plus it holds a `type_id` in its header.
    The only `type_id` value of -1 (user type) works from Python perspective.
    """
    standard_type = EnumObject
    type_code = TC_ENUM_ARRAY

    @classmethod
    def build_header_class(cls):
        """Header: type code byte, enum type ID, element count."""
        fields = [
            ('type_code', ctypes.c_byte),
            ('type_id', ctypes.c_int),
            ('length', ctypes.c_int),
        ]
        return type(
            cls.__name__ + 'Header',
            (ctypes.LittleEndianStructure,),
            {'_pack_': 1, '_fields_': fields},
        )

    @classmethod
    def from_python(cls, value):
        """Serialize a (type_id, iterable of pairs) tuple to bytes."""
        type_id, elements = value
        header_class = cls.build_header_class()
        header = header_class()
        if hasattr(header, 'type_code'):
            header.type_code = int.from_bytes(
                cls.type_code,
                byteorder=PROTOCOL_BYTE_ORDER
            )
        header.length = len(elements)
        header.type_id = type_id

        buffer = bytes(header)
        for element in elements:
            buffer += cls.standard_type.from_python(element)
        return buffer

    @classmethod
    def to_python(cls, ctype_object, *args, **kwargs):
        """Deserialize to a (type_id, list of pairs) tuple."""
        return (
            ctype_object.type_id,
            super().to_python(ctype_object, *args, **kwargs),
        )
+
+
class BinaryEnumArrayObject(EnumArrayObject):
    """ Same layout as :class:`EnumArrayObject`, for binary enum elements. """
    standard_type = BinaryEnumObject
+
+
class ObjectArray(EnumArrayObject):
    # NOTE(review): `standard_type = BinaryEnumObject` looks like a
    # copy-paste from BinaryEnumArrayObject above — an object array would be
    # expected to carry arbitrary objects, and no TC_OBJECT_ARRAY type code
    # is set here either. Confirm against the protocol spec.
    standard_type = BinaryEnumObject
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/type_codes.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/type_codes.py b/modules/platforms/python/pyignite/datatypes/type_codes.py
new file mode 100644
index 0000000..d5e8dd4
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/type_codes.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Single-byte type codes of the Ignite binary protocol. Where a value is
# serialized with a header, the matching code is written as the first
# (`type_code`) byte of that header.
TC_BYTE = b'\x01'
TC_SHORT = b'\x02'
TC_INT = b'\x03'
TC_LONG = b'\x04'
TC_FLOAT = b'\x05'
TC_DOUBLE = b'\x06'
TC_CHAR = b'\x07'
TC_BOOL = b'\x08'
TC_STRING = b'\x09'
TC_UUID = b'\x0a'
TC_DATE = b'\x0b'
TC_BYTE_ARRAY = b'\x0c'
TC_SHORT_ARRAY = b'\x0d'
TC_INT_ARRAY = b'\x0e'
TC_LONG_ARRAY = b'\x0f'
TC_FLOAT_ARRAY = b'\x10'
TC_DOUBLE_ARRAY = b'\x11'
TC_CHAR_ARRAY = b'\x12'
TC_BOOL_ARRAY = b'\x13'
TC_STRING_ARRAY = b'\x14'
TC_UUID_ARRAY = b'\x15'
TC_DATE_ARRAY = b'\x16'
TC_OBJECT_ARRAY = b'\x17'
TC_COLLECTION = b'\x18'
TC_MAP = b'\x19'

TC_ARRAY_WRAPPED_OBJECTS = b'\x1b'

TC_ENUM = b'\x1c'
TC_ENUM_ARRAY = b'\x1d'

TC_DECIMAL = b'\x1e'
TC_DECIMAL_ARRAY = b'\x1f'
TC_TIMESTAMP = b'\x21'
TC_TIMESTAMP_ARRAY = b'\x22'

TC_TIME = b'\x24'
TC_TIME_ARRAY = b'\x25'
TC_BINARY_ENUM = b'\x26'

# Special markers: the null value and the user-defined Complex Object.
TC_NULL = b'\x65'
TC_COMPLEX_OBJECT = b'\x67'
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/exceptions.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/exceptions.py b/modules/platforms/python/pyignite/exceptions.py
new file mode 100644
index 0000000..2bc5996
--- /dev/null
+++ b/modules/platforms/python/pyignite/exceptions.py
@@ -0,0 +1,80 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from socket import error as SocketError
+
+
class ParseError(Exception):
    """
    Raised when `pyignite` is unable to build a query to, or parse
    a response from, an Ignite node.
    """
+
+
class HandshakeError(SocketError):
    """
    Raised on Ignite binary protocol handshake failure, as defined in
    https://apacheignite.readme.io/docs/binary-client-protocol#section-handshake
    """
+
+
class ReconnectError(Exception):
    """
    Raised by `Client.reconnect` when there are no more nodes left
    to connect to. Not really an error, but a flow-control device,
    much like `StopIteration`.
    """
+
+
class ParameterError(Exception):
    """
    Represents a parameter validation error in any `pyignite` method.
    """
+
+
class CacheError(Exception):
    """
    Raised whenever a remote Thin client operation returns an error.
    Base class for the more specific cache-related errors below.
    """
+
+
class BinaryTypeError(CacheError):
    """
    A remote error in an operation with the Complex Object registry.
    """
+
+
class CacheCreationError(CacheError):
    """
    Raised when any complex operation fails during the cache
    creation phase.
    """
+
+
class SQLError(CacheError):
    """
    An error in an SQL query.
    """
[3/6] ignite git commit: IGNITE-7782 Python thin client
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/cache.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/cache.py b/modules/platforms/python/pyignite/cache.py
new file mode 100644
index 0000000..6cd7377
--- /dev/null
+++ b/modules/platforms/python/pyignite/cache.py
@@ -0,0 +1,595 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any, Iterable, Optional, Union
+
+from .datatypes import prop_codes
+from .exceptions import (
+ CacheCreationError, CacheError, ParameterError, SQLError,
+)
+from .utils import cache_id, is_wrapped, status_to_exception, unwrap_binary
+from .api.cache_config import (
+ cache_create, cache_create_with_config,
+ cache_get_or_create, cache_get_or_create_with_config,
+ cache_destroy, cache_get_configuration,
+)
+from .api.key_value import (
+ cache_get, cache_put, cache_get_all, cache_put_all, cache_replace,
+ cache_clear, cache_clear_key, cache_clear_keys,
+ cache_contains_key, cache_contains_keys,
+ cache_get_and_put, cache_get_and_put_if_absent, cache_put_if_absent,
+ cache_get_and_remove, cache_get_and_replace,
+ cache_remove_key, cache_remove_keys, cache_remove_all,
+ cache_remove_if_equals, cache_replace_if_equals, cache_get_size,
+)
+from .api.sql import scan, scan_cursor_get_page, sql, sql_cursor_get_page
+
+
# All known cache property codes, collected from `prop_codes` by name.
# A set comprehension avoids the intermediate list of set([...]).
PROP_CODES = {
    getattr(prop_codes, name)
    for name in dir(prop_codes)
    if name.startswith('PROP_')
}
# Cache creation function, keyed as [settings is a dict][with_get].
CACHE_CREATE_FUNCS = {
    True: {
        True: cache_get_or_create_with_config,
        False: cache_create_with_config,
    },
    False: {
        True: cache_get_or_create,
        False: cache_create,
    },
}
+
+
class Cache:
    """
    Ignite cache abstraction. Users should never use this class directly,
    but construct its instances with
    :py:meth:`~pyignite.client.Client.create_cache`,
    :py:meth:`~pyignite.client.Client.get_or_create_cache` or
    :py:meth:`~pyignite.client.Client.get_cache` methods instead. See
    :ref:`this example <create_cache>` on how to do it.
    """
    # Cache ID, computed from the name in __init__.
    _cache_id = None
    # Cache name (PROP_NAME).
    _name = None
    # Owning `Client` connection.
    _client = None
    # Configuration dict, fetched lazily by the `settings` property.
    _settings = None

    @staticmethod
    def _validate_settings(
        settings: Union[str, dict]=None, get_only: bool=False,
    ):
        """
        Check that `settings` is either a cache name or a dict of known
        cache properties including PROP_NAME; raise ParameterError otherwise.
        """
        if any([
            not settings,
            type(settings) not in (str, dict),
            type(settings) is dict and prop_codes.PROP_NAME not in settings,
        ]):
            raise ParameterError('You should supply at least cache name')

        if all([
            type(settings) is dict,
            not set(settings).issubset(PROP_CODES),
        ]):
            raise ParameterError('One or more settings was not recognized')

        # In get-only mode no properties other than the name make sense.
        if get_only and type(settings) is dict and len(settings) != 1:
            raise ParameterError('Only cache name allowed as a parameter')
+
    def __init__(
        self, client: 'Client', settings: Union[str, dict]=None,
        with_get: bool=False, get_only: bool=False,
    ):
        """
        Initialize cache object.

        :param client: Ignite client,
        :param settings: cache settings. Can be a string (cache name) or a dict
         of cache properties and their values. In this case PROP_NAME is
         mandatory,
        :param with_get: (optional) do not raise exception, if the cache
         already exists. Defaults to False,
        :param get_only: (optional) do not communicate with Ignite server
         at all, only create Cache instance. Defaults to False.
        """
        self._client = client
        # NOTE(review): `get_only` is not forwarded to _validate_settings,
        # so its "name-only" check never triggers from here — confirm intended.
        self._validate_settings(settings)
        if type(settings) == str:
            self._name = settings
        else:
            self._name = settings[prop_codes.PROP_NAME]

        if not get_only:
            # Pick create/get-or-create, plain or with-config, per arguments.
            func = CACHE_CREATE_FUNCS[type(settings) is dict][with_get]
            result = func(client, settings)
            if result.status != 0:
                raise CacheCreationError(result.message)

        self._cache_id = cache_id(self._name)
+
    @property
    def settings(self) -> Optional[dict]:
        """
        Lazy Cache settings. See the :ref:`example <sql_cache_read>`
        of reading this property.

        All cache properties are documented here: :ref:`cache_props`.

        :return: dict of cache properties and their values.
        :raise CacheError: when the configuration can not be fetched.
        """
        # Fetched from the server once, then memoized.
        if self._settings is None:
            config_result = cache_get_configuration(self._client, self._cache_id)
            if config_result.status == 0:
                self._settings = config_result.value
            else:
                raise CacheError(config_result.message)

        return self._settings
+
    @property
    def name(self) -> str:
        """
        Lazy cache name.

        :return: cache name string.
        """
        if self._name is None:
            # Fall back to the server-side configuration (PROP_NAME).
            self._name = self.settings[prop_codes.PROP_NAME]

        return self._name
+
    @property
    def client(self) -> 'Client':
        """
        Ignite :class:`~pyignite.client.Client` object.

        :return: Client object, through which the cache is accessed.
        """
        return self._client
+
    @property
    def cache_id(self) -> int:
        """
        Cache ID, as computed from the cache name by `utils.cache_id`.

        :return: integer value of the cache ID.
        """
        return self._cache_id
+
    def _process_binary(self, value: Any) -> Any:
        """
        Detects and recursively unwraps Binary Object.

        :param value: anything that could be a Binary Object,
        :return: the result of the Binary Object unwrapping with all other data
         left intact.
        """
        if is_wrapped(value):
            return unwrap_binary(self._client, value)
        return value
+
    @status_to_exception(CacheError)
    def destroy(self):
        """
        Destroys cache with a given name.

        :raise CacheError: on a non-zero result status (via the decorator).
        """
        return cache_destroy(self._client, self._cache_id)
+
    @status_to_exception(CacheError)
    def get(self, key, key_hint: object=None) -> Any:
        """
        Retrieves a value from cache by key.

        :param key: key for the cache entry. Can be of any supported type,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :return: value retrieved.
        """
        result = cache_get(self._client, self._cache_id, key, key_hint=key_hint)
        # Unwrap a Binary Object payload, if any, before returning.
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def put(self, key, value, key_hint: object=None, value_hint: object=None):
        """
        Puts a value with a given key to cache (overwriting existing value
        if any).

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted.
        """
        return cache_put(
            self._client, self._cache_id, key, value,
            key_hint=key_hint, value_hint=value_hint
        )
+
    @status_to_exception(CacheError)
    def get_all(self, keys: list) -> dict:
        """
        Retrieves multiple key-value pairs from cache.

        :param keys: list of keys or tuples of (key, key_hint),
        :return: a dict of key-value pairs.
        """
        result = cache_get_all(self._client, self._cache_id, keys)
        if result.value:
            # Unwrap any Binary Object values in place.
            for key, value in result.value.items():
                result.value[key] = self._process_binary(value)
        return result
+
    @status_to_exception(CacheError)
    def put_all(self, pairs: dict):
        """
        Puts multiple key-value pairs to cache (overwriting existing
        associations if any).

        :param pairs: dictionary type parameters, contains key-value pairs
         to save. Each key or value can be an item of representable
         Python type or a tuple of (item, hint).
        """
        return cache_put_all(self._client, self._cache_id, pairs)
+
    @status_to_exception(CacheError)
    def replace(
        self, key, value, key_hint: object=None, value_hint: object=None
    ):
        """
        Puts a value with a given key to cache only if the key already exist.

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted.
        """
        result = cache_replace(
            self._client, self._cache_id, key, value,
            key_hint=key_hint, value_hint=value_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
+ @status_to_exception(CacheError)
+ def clear(self, keys: Optional[list]=None):
+ """
+ Clears the cache without notifying listeners or cache writers.
+
+ :param keys: (optional) list of cache keys or (key, key type
+ hint) tuples to clear (default: clear all).
+ """
+ if keys:
+ return cache_clear_keys(self._client, self._cache_id, keys)
+ else:
+ return cache_clear(self._client, self._cache_id)
+
    @status_to_exception(CacheError)
    def clear_key(self, key, key_hint: object=None):
        """
        Clears the cache key without notifying listeners or cache writers.

        :param key: key for the cache entry,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted.
        """
        return cache_clear_key(
            self._client, self._cache_id, key, key_hint=key_hint
        )
+
    @status_to_exception(CacheError)
    def contains_key(self, key, key_hint=None) -> bool:
        """
        Returns a value indicating whether given key is present in cache.

        :param key: key for the cache entry. Can be of any supported type,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :return: boolean `True` when key is present, `False` otherwise.
        """
        return cache_contains_key(
            self._client, self._cache_id, key, key_hint=key_hint
        )
+
    @status_to_exception(CacheError)
    def contains_keys(self, keys: Iterable) -> bool:
        """
        Returns a value indicating whether all given keys are present in cache.

        :param keys: a list of keys or (key, type hint) tuples,
        :return: boolean `True` when all keys are present, `False` otherwise.
        """
        return cache_contains_keys(self._client, self._cache_id, keys)
+
    @status_to_exception(CacheError)
    def get_and_put(self, key, value, key_hint=None, value_hint=None) -> Any:
        """
        Puts a value with a given key to cache, and returns the previous value
        for that key, or None if there was no such key.

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted.
        :return: old value or None.
        """
        result = cache_get_and_put(
            self._client, self._cache_id, key, value, key_hint, value_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def get_and_put_if_absent(
        self, key, value, key_hint=None, value_hint=None
    ) -> Any:
        """
        Puts a value with a given key to cache only if the key does not
        already exist.

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted,
        :return: old value or None.
        """
        result = cache_get_and_put_if_absent(
            self._client, self._cache_id, key, value, key_hint, value_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def put_if_absent(self, key, value, key_hint=None, value_hint=None):
        """
        Puts a value with a given key to cache only if the key does not
        already exist.

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted.
        """
        return cache_put_if_absent(
            self._client, self._cache_id, key, value, key_hint, value_hint
        )
+
    @status_to_exception(CacheError)
    def get_and_remove(self, key, key_hint=None) -> Any:
        """
        Removes the cache entry with specified key, returning the value.

        :param key: key for the cache entry. Can be of any supported type,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :return: old value or None.
        """
        result = cache_get_and_remove(
            self._client, self._cache_id, key, key_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def get_and_replace(
        self, key, value, key_hint=None, value_hint=None
    ) -> Any:
        """
        Puts a value with a given key to cache, returning previous value
        for that key, if and only if there is a value currently mapped
        for that key.

        :param key: key for the cache entry. Can be of any supported type,
        :param value: value for the key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted.
        :return: old value or None.
        """
        result = cache_get_and_replace(
            self._client, self._cache_id, key, value, key_hint, value_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def remove_key(self, key, key_hint=None):
        """
        Removes the cache entry with the given key.
        (The previous wording "Clears the cache key without notifying
        listeners" was copied from `clear_key`; this call removes.)

        :param key: key for the cache entry,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted.
        """
        return cache_remove_key(self._client, self._cache_id, key, key_hint)
+
    @status_to_exception(CacheError)
    def remove_keys(self, keys: list):
        """
        Removes cache entries by given list of keys, notifying listeners
        and cache writers.

        :param keys: list of keys or tuples of (key, key_hint) to remove.
        """
        return cache_remove_keys(self._client, self._cache_id, keys)
+
    @status_to_exception(CacheError)
    def remove_all(self):
        """
        Removes all cache entries, notifying listeners and cache writers.
        """
        return cache_remove_all(self._client, self._cache_id)
+
    @status_to_exception(CacheError)
    def remove_if_equals(self, key, sample, key_hint=None, sample_hint=None):
        """
        Removes an entry with a given key if provided value is equal to
        actual value, notifying listeners and cache writers.

        :param key: key for the cache entry,
        :param sample: a sample to compare the stored value with,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param sample_hint: (optional) Ignite data type, for which
         the given sample should be converted.
        """
        return cache_remove_if_equals(
            self._client, self._cache_id, key, sample, key_hint, sample_hint
        )
+
    @status_to_exception(CacheError)
    def replace_if_equals(
        self, key, sample, value,
        key_hint=None, sample_hint=None, value_hint=None
    ) -> Any:
        """
        Puts a value with a given key to cache only if the key already exists
        and value equals provided sample.

        :param key: key for the cache entry,
        :param sample: a sample to compare the stored value with,
        :param value: new value for the given key,
        :param key_hint: (optional) Ignite data type, for which the given key
         should be converted,
        :param sample_hint: (optional) Ignite data type, for which
         the given sample should be converted,
        :param value_hint: (optional) Ignite data type, for which the given
         value should be converted,
        :return: boolean `True` when key is present, `False` otherwise.
        """
        result = cache_replace_if_equals(
            self._client, self._cache_id, key, sample, value,
            key_hint, sample_hint, value_hint
        )
        result.value = self._process_binary(result.value)
        return result
+
    @status_to_exception(CacheError)
    def get_size(self, peek_modes=0):
        """
        Gets the number of entries in cache.

        :param peek_modes: (optional) limit count to near cache partition
         (PeekModes.NEAR), primary cache (PeekModes.PRIMARY), or backup cache
         (PeekModes.BACKUP). Defaults to all cache partitions (PeekModes.ALL),
        :return: integer number of cache entries.
        """
        return cache_get_size(self._client, self._cache_id, peek_modes)
+
    def scan(self, page_size: int=1, partitions: int=-1, local: bool=False):
        """
        Returns all key-value pairs from the cache, similar to `get_all`, but
        with internal pagination, which is slower, but safer.

        :param page_size: (optional) page size. Default size is 1 (slowest
         and safest),
        :param partitions: (optional) number of partitions to query
         (negative to query entire cache),
        :param local: (optional) pass True if this query should be executed
         on local node only. Defaults to False,
        :return: generator with key-value pairs.
        :raise CacheError: on any non-zero server status.
        """
        result = scan(self._client, self._cache_id, page_size, partitions, local)
        if result.status != 0:
            raise CacheError(result.message)

        # First page is delivered with the initial response.
        cursor = result.value['cursor']
        for k, v in result.value['data'].items():
            k = self._process_binary(k)
            v = self._process_binary(v)
            yield k, v

        # Keep fetching pages until the server signals exhaustion.
        while result.value['more']:
            result = scan_cursor_get_page(self._client, cursor)
            if result.status != 0:
                raise CacheError(result.message)

            for k, v in result.value['data'].items():
                k = self._process_binary(k)
                v = self._process_binary(v)
                yield k, v
+
+ def select_row(
+ self, query_str: str, page_size: int=1,
+ query_args: Optional[list]=None, distributed_joins: bool=False,
+ replicated_only: bool=False, local: bool=False, timeout: int=0
+ ):
+ """
+ Executes a simplified SQL SELECT query over data stored in the cache.
+ The query returns the whole record (key and value).
+
+ :param query_str: SQL query string,
+ :param page_size: (optional) cursor page size. Default is 1, which
+ means that client makes one server call per row,
+ :param query_args: (optional) query arguments,
+ :param distributed_joins: (optional) distributed joins. Defaults
+ to False,
+ :param replicated_only: (optional) whether query contains only
+ replicated tables or not. Defaults to False,
+ :param local: (optional) pass True if this query should be executed
+ on local node only. Defaults to False,
+ :param timeout: (optional) non-negative timeout value in ms. Zero
+ disables timeout (default),
+ :return: generator with key-value pairs.
+ """
+ def generate_result(value):
+ cursor = value['cursor']
+ more = value['more']
+ for k, v in value['data'].items():
+ k = self._process_binary(k)
+ v = self._process_binary(v)
+ yield k, v
+
+ while more:
+ inner_result = sql_cursor_get_page(self._client, cursor)
+ if result.status != 0:
+ raise SQLError(result.message)
+ more = inner_result.value['more']
+ for k, v in inner_result.value['data'].items():
+ k = self._process_binary(k)
+ v = self._process_binary(v)
+ yield k, v
+
+ type_name = self.settings[
+ prop_codes.PROP_QUERY_ENTITIES
+ ][0]['value_type_name']
+ if not type_name:
+ raise SQLError('Value type is unknown')
+ result = sql(
+ self._client,
+ self._cache_id,
+ type_name,
+ query_str,
+ page_size,
+ query_args,
+ distributed_joins,
+ replicated_only,
+ local,
+ timeout
+ )
+ if result.status != 0:
+ raise SQLError(result.message)
+
+ return generate_result(result.value)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/client.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/client.py b/modules/platforms/python/pyignite/client.py
new file mode 100644
index 0000000..d5a9464
--- /dev/null
+++ b/modules/platforms/python/pyignite/client.py
@@ -0,0 +1,406 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains `Client` class, that lets you communicate with Apache
+Ignite cluster node by the means of Ignite binary client protocol.
+
+To start the communication, you may connect to the node of your choice
+by instantiating the `Client` object and calling
+:py:meth:`~pyignite.connection.Connection.connect` method with proper
+parameters.
+
+The whole storage room of Ignite cluster is split up into named structures,
+called caches. For accessing the particular cache in key-value style
+(a-la Redis or memcached) you should first create
+the :class:`~pyignite.cache.Cache` object by calling
+:py:meth:`~pyignite.client.Client.create_cache` or
+:py:meth:`~pyignite.client.Client.get_or_create_cache()` methods, then call
+:class:`~pyignite.cache.Cache` methods. If you wish to create a cache object
+without communicating with server, there is also a
+:py:meth:`~pyignite.client.Client.get_cache()` method that does just that.
+
+For using Ignite SQL, call :py:meth:`~pyignite.client.Client.sql` method.
+It returns a generator with result rows.
+
+:py:meth:`~pyignite.client.Client.register_binary_type` and
+:py:meth:`~pyignite.client.Client.query_binary_type` methods operate
+the local (class-wise) registry for Ignite Complex objects.
+"""
+
+from collections import defaultdict, OrderedDict
+from typing import Iterable, Type, Union
+
+from .api.binary import get_binary_type, put_binary_type
+from .api.cache_config import cache_get_names
+from .api.sql import sql_fields, sql_fields_cursor_get_page
+from .cache import Cache
+from .connection import Connection
+from .constants import *
+from .datatypes import BinaryObject
+from .datatypes.internal import tc_map
+from .exceptions import BinaryTypeError, CacheError, SQLError
+from .utils import entity_id, schema_id, status_to_exception
+from .binary import GenericObjectMeta
+
+
+__all__ = ['Client']
+
+
class Client(Connection):
    """
    This is a main `pyignite` class, that is built upon the
    :class:`~pyignite.connection.Connection`. In addition to the attributes,
    properties and methods of its parent class, `Client` implements
    the following features:

    * cache factory. Cache objects are used for key-value operations,
    * Ignite SQL endpoint,
    * binary types registration endpoint.
    """

    # class-level registry of Complex object data classes:
    # {type ID: {schema ID: data class}}; shared by all Client instances
    _registry = defaultdict(dict)
    # schema footer approach: True − compact, False − full,
    # None − not yet decided (follow the server)
    _compact_footer = None
+
def _transfer_params(self, to: 'Client'):
    """
    Copy client-level state (binary type registry and schema footer
    approach) to another client, on top of the connection-level
    parameters transferred by the parent class.

    :param to: client object to transfer parameters to.
    """
    super()._transfer_params(to)
    to._registry, to._compact_footer = self._registry, self._compact_footer
+
def __init__(self, compact_footer: bool=None, *args, **kwargs):
    """
    Initialize client.

    :param compact_footer: (optional) use compact (True, recommended) or
     full (False) schema approach when serializing Complex objects.
     Default is to use the same approach the server is using (None).
     Apache Ignite binary protocol documentation on this topic:
     https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-schema
    """
    # NOTE(review): this sets an *instance* attribute, while the
    # `compact_footer` property reads and writes the *class* attribute
    # `self.__class__._compact_footer` — confirm this shadowing is
    # intended
    self._compact_footer = compact_footer
    super().__init__(*args, **kwargs)
+
@status_to_exception(BinaryTypeError)
def get_binary_type(self, binary_type: Union[str, int]) -> dict:
    """
    Gets the binary type information from the Ignite server. This is quite
    a low-level implementation of Ignite thin client protocol's
    `OP_GET_BINARY_TYPE` operation. You would probably want to use
    :py:meth:`~pyignite.client.Client.query_binary_type` instead.

    :param binary_type: binary type name or ID,
    :return: binary type description − a dict with the following fields:

     - `type_exists`: True if the type is registered, False otherwise. In
       the latter case all the following fields are omitted,
     - `type_id`: Complex object type ID,
     - `type_name`: Complex object type name,
     - `affinity_key_field`: string value or None,
     - `is_enum`: False in case of Complex object registration,
     - `schemas`: a list, containing the Complex object schemas in format:
       OrderedDict[field name: field type hint]. A schema can be empty.
    """
    def convert_type(tc_type: int):
        try:
            return tc_map(tc_type.to_bytes(1, PROTOCOL_BYTE_ORDER))
        except (KeyError, OverflowError):
            # if conversion to char or type lookup failed,
            # we probably have a binary object type ID
            return BinaryObject

    def convert_schema(
        field_ids: list, binary_fields: list
    ) -> OrderedDict:
        # index the field descriptions by ID once, instead of rescanning
        # the whole `binary_fields` list for every schema field
        # (previously O(fields × schema size))
        fields_by_id = {
            field['field_id']: field for field in binary_fields
        }
        converted_schema = OrderedDict()
        for field_id in field_ids:
            binary_field = fields_by_id[field_id]
            converted_schema[binary_field['field_name']] = convert_type(
                binary_field['type_id']
            )
        return converted_schema

    # `get_binary_type` here is the module-level API function, not this
    # method (the method name shadows it only in attribute lookups)
    result = get_binary_type(self, binary_type)
    if result.status != 0 or not result.value['type_exists']:
        return result

    # re-shape the raw protocol answer: replace flat field/schema lists
    # with per-schema OrderedDicts of {field name: type hint}
    binary_fields = result.value.pop('binary_fields')
    old_format_schemas = result.value.pop('schema')
    result.value['schemas'] = []
    for s_id, field_ids in old_format_schemas.items():
        result.value['schemas'].append(
            convert_schema(field_ids, binary_fields)
        )
    return result
+
@property
def compact_footer(self) -> bool:
    """
    Remembers the Complex object schema encoding approach observed while
    decoding, so that the very same approach is used when encoding.

    :return: True if compact schema was used by server or no Complex
     object decoding has yet taken place, False if full schema was used.
    """
    # ordinary object property backed by a class attribute: the approach
    # is shared by every client of this class
    footer = self.__class__._compact_footer
    # the initial None ("undecided yet") reads as True — prefer compact —
    # without overwriting the backing value
    return footer or footer is None
+
@compact_footer.setter
def compact_footer(self, value: bool):
    # the schema approach is sticky: it may be set once (from None) or
    # re-set to the same value, but never flipped afterwards
    if self.__class__._compact_footer in (value, None):
        self.__class__._compact_footer = value
    else:
        raise Warning('Can not change client schema approach.')
+
@status_to_exception(BinaryTypeError)
def put_binary_type(
        self, type_name: str, affinity_key_field: str=None,
        is_enum=False, schema: dict=None
):
    """
    Registers binary type information in cluster, without touching the
    local binary registry. This is a literal implementation of Ignite
    thin client protocol's `OP_PUT_BINARY_TYPE` operation. You would
    probably want to use
    :py:meth:`~pyignite.client.Client.register_binary_type` instead.

    :param type_name: name of the data type being registered,
    :param affinity_key_field: (optional) name of the affinity key field,
    :param is_enum: (optional) register enum if True, binary object
     otherwise. Defaults to False,
    :param schema: (optional) when register enum, pass a dict
     of enumerated parameter names as keys and an integers as values.
     When register binary type, pass a dict of field names: field types.
     Binary type with no fields is OK.
    """
    # delegate to the module-level API function of the same name
    return put_binary_type(
        self, type_name, affinity_key_field, is_enum, schema)
+
@staticmethod
def _create_dataclass(type_name: str, schema: OrderedDict=None) -> Type:
    """
    Produce a default (generic) class for an Ignite Complex object.

    :param type_name: Complex object type name,
    :param schema: Complex object schema,
    :return: the resulting class.
    """
    # a missing schema is treated as an empty one
    return GenericObjectMeta(type_name, (), {}, schema=schema or {})
+
def _sync_binary_registry(self, type_id: int):
    """
    Fetch the Complex object description for the given type from the
    Ignite server and register default data classes for any schemas
    that are not in the local registry yet.

    :param type_id: Complex object type ID.
    """
    type_info = self.get_binary_type(type_id)
    if not type_info['type_exists']:
        return
    for schema in type_info['schemas']:
        sid = schema_id(schema)
        if not self._registry[type_id].get(sid, None):
            self._registry[type_id][sid] = self._create_dataclass(
                type_info['type_name'],
                schema,
            )
+
def register_binary_type(
        self, data_class: Type, affinity_key_field: str=None,
):
    """
    Register the given class as a representation of a certain Complex
    object type. Discards autogenerated or previously registered class.

    :param data_class: Complex object class,
    :param affinity_key_field: (optional) affinity parameter.
    """
    already_known = self.query_binary_type(
        data_class.type_id, data_class.schema_id
    )
    # announce the type to the cluster only if it is not there yet
    if not already_known:
        self.put_binary_type(
            data_class.type_name,
            affinity_key_field,
            schema=data_class.schema,
        )
    self._registry[data_class.type_id][data_class.schema_id] = data_class
+
def query_binary_type(
        self, binary_type: Union[int, str], schema: Union[int, dict]=None,
        sync: bool=True
):
    """
    Queries the registry of Complex object classes.

    :param binary_type: Complex object type name or ID,
    :param schema: (optional) Complex object schema or schema ID,
    :param sync: (optional) look up the Ignite server for registered
     Complex objects and create data classes for them if needed,
    :return: found dataclass or None, if `schema` parameter is provided,
     a dict of {schema ID: dataclass} format otherwise.
    """
    type_id = entity_id(binary_type)
    # NOTE(review): `schema` may be None here; assumes `schema_id`
    # accepts None — confirm against pyignite.utils.schema_id
    s_id = schema_id(schema)

    # truthiness check: a schema of 0 or {} falls into the "all schemas"
    # branch below
    if schema:
        try:
            result = self._registry[type_id][s_id]
        except KeyError:
            result = None
    else:
        result = self._registry[type_id]

    # nothing found locally: pull the type info from the server once,
    # then retry the lookup without any further server calls
    if sync and not result:
        self._sync_binary_registry(type_id)
        return self.query_binary_type(type_id, s_id, sync=False)

    return result
+
def create_cache(self, settings: Union[str, dict]) -> 'Cache':
    """
    Creates a new Ignite cache. Raises `CacheError` if a cache with
    the same name already exists.

    :param settings: cache name or dict of cache properties' codes
     and values. All cache properties are documented here:
     :ref:`cache_props`. See also the
     :ref:`cache creation example <sql_cache_create>`,
    :return: :class:`~pyignite.cache.Cache` object.
    """
    return Cache(self, settings)
+
def get_or_create_cache(self, settings: Union[str, dict]) -> 'Cache':
    """
    Creates an Ignite cache, unless a cache with the same name already
    exists, in which case the existing cache is used.

    :param settings: cache name or dict of cache properties' codes
     and values. All cache properties are documented here:
     :ref:`cache_props`. See also the
     :ref:`cache creation example <sql_cache_create>`,
    :return: :class:`~pyignite.cache.Cache` object.
    """
    return Cache(self, settings, with_get=True)
+
def get_cache(self, settings: Union[str, dict]) -> 'Cache':
    """
    Creates a Cache object for a given cache name without checking it up
    on the server. If such a cache does not exist, some kind of exception
    (most probably `CacheError`) may be raised later.

    :param settings: cache name or cache properties (but only `PROP_NAME`
     property is allowed),
    :return: :class:`~pyignite.cache.Cache` object.
    """
    return Cache(self, settings, get_only=True)
+
@status_to_exception(CacheError)
def get_cache_names(self) -> list:
    """
    Retrieves the names of all caches existing in the cluster.

    :return: list of cache names.
    """
    return cache_get_names(self)
+
def sql(
        self, query_str: str, page_size: int=1, query_args: Iterable=None,
        schema: Union[int, str]='PUBLIC',
        statement_type: int=0, distributed_joins: bool=False,
        local: bool=False, replicated_only: bool=False,
        enforce_join_order: bool=False, collocated: bool=False,
        lazy: bool=False, include_field_names: bool=False,
        max_rows: int=-1, timeout: int=0,
):
    """
    Runs an SQL query and returns its result.

    :param query_str: SQL query string,
    :param page_size: (optional) cursor page size. Default is 1, which
     means that client makes one server call per row,
    :param query_args: (optional) query arguments. List of values or
     (value, type hint) tuples,
    :param schema: (optional) schema for the query. Defaults to `PUBLIC`,
    :param statement_type: (optional) statement type. Can be:

     * StatementType.ALL − any type (default),
     * StatementType.SELECT − select,
     * StatementType.UPDATE − update.

    :param distributed_joins: (optional) distributed joins. Defaults
     to False,
    :param local: (optional) pass True if this query should be executed
     on local node only. Defaults to False,
    :param replicated_only: (optional) whether query contains only
     replicated tables or not. Defaults to False,
    :param enforce_join_order: (optional) enforce join order. Defaults
     to False,
    :param collocated: (optional) whether your data is co-located or not.
     Defaults to False,
    :param lazy: (optional) lazy query execution. Defaults to False,
    :param include_field_names: (optional) include field names in result.
     Defaults to False,
    :param max_rows: (optional) query-wide maximum of rows. Defaults to -1
     (all rows),
    :param timeout: (optional) non-negative timeout value in ms.
     Zero disables timeout (default),
    :return: generator with result rows as a lists. If
     `include_field_names` was set, the first row will hold field names.
    """
    def generate_result(value):
        cursor = value['cursor']
        more = value['more']

        if include_field_names:
            yield value['fields']
            field_count = len(value['fields'])
        else:
            field_count = value['field_count']
        for line in value['data']:
            yield line

        # keep fetching cursor pages until the server says there are
        # no more rows
        while more:
            inner_result = sql_fields_cursor_get_page(
                self, cursor, field_count
            )
            if inner_result.status != 0:
                # fixed: previously raised with `result.message` (the
                # outer, successful response), masking the actual error
                # of the page request
                raise SQLError(inner_result.message)
            more = inner_result.value['more']
            for line in inner_result.value['data']:
                yield line

    schema = self.get_or_create_cache(schema)
    result = sql_fields(
        self, schema.cache_id, query_str,
        page_size, query_args, schema.name,
        statement_type, distributed_joins, local, replicated_only,
        enforce_join_order, collocated, lazy, include_field_names,
        max_rows, timeout,
    )
    if result.status != 0:
        raise SQLError(result.message)

    return generate_result(result.value)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/connection/__init__.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/connection/__init__.py b/modules/platforms/python/pyignite/connection/__init__.py
new file mode 100644
index 0000000..32decdf
--- /dev/null
+++ b/modules/platforms/python/pyignite/connection/__init__.py
@@ -0,0 +1,329 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains `Connection` class, that wraps TCP socket handling,
+as well as Ignite protocol handshaking.
+"""
+
+import socket
+
+from pyignite.constants import *
+from pyignite.exceptions import (
+ HandshakeError, ParameterError, ReconnectError, SocketError,
+)
+
+from pyignite.utils import is_iterable
+from .handshake import HandshakeRequest, read_response
+from .ssl import wrap
+
+
+__all__ = ['Connection']
+
+
class Connection:
    """
    This is a `pyignite` class, that represents a connection to Ignite
    node. It serves multiple purposes:

    * socket wrapper. Detects fragmentation and network errors. See also
      https://docs.python.org/3/howto/sockets.html,
    * binary protocol connector. Encapsulates handshake, data read-ahead
      and failover reconnection.
    """

    # underlying TCP (possibly SSL-wrapped) socket, None until connected
    _socket = None
    # iterator of (host, port) tuples for failover reconnection
    nodes = None
    # host/port of the node currently connected to, None when disconnected
    host = None
    port = None
    # socket operation timeout in seconds, None means blocking mode
    timeout = None
    # read-ahead buffer served by `recv` before touching the socket
    prefetch = None
    # optional cluster credentials
    username = None
    password = None
+
@staticmethod
def _check_kwargs(kwargs):
    """
    Validate connection keyword arguments, rejecting unknown names.

    :param kwargs: keyword arguments to validate,
    :raise: ParameterError on the first unexpected argument.
    """
    allowed = {
        'timeout',
        'use_ssl',
        'ssl_version',
        'ssl_ciphers',
        'ssl_cert_reqs',
        'ssl_keyfile',
        'ssl_certfile',
        'ssl_ca_certfile',
        'username',
        'password',
    }
    for name in kwargs:
        if name not in allowed:
            raise ParameterError((
                'Unexpected parameter for connection initialization: `{}`'
            ).format(name))
+
def __init__(self, prefetch: bytes=b'', **kwargs):
    """
    Initialize connection.

    For the use of the SSL-related parameters see
    https://docs.python.org/3/library/ssl.html#ssl-certificates.

    :param prefetch: (optional) initialize the read-ahead data buffer.
     Empty by default,
    :param timeout: (optional) sets timeout (in seconds) for each socket
     operation including `connect`. 0 means non-blocking mode, which is
     virtually guaranteed to fail. Can accept integer or float value.
     Default is None (blocking mode),
    :param use_ssl: (optional) set to True if Ignite server uses SSL
     on its binary connector. Defaults to use SSL when username
     and password has been supplied, not to use SSL otherwise,
    :param ssl_version: (optional) SSL version constant from standard
     `ssl` module. Defaults to TLS v1.1, as in Ignite 2.5,
    :param ssl_ciphers: (optional) ciphers to use. If not provided,
     `ssl` default ciphers are used,
    :param ssl_cert_reqs: (optional) determines how the remote side
     certificate is treated:

     * `ssl.CERT_NONE` − remote certificate is ignored (default),
     * `ssl.CERT_OPTIONAL` − remote certificate will be validated,
       if provided,
     * `ssl.CERT_REQUIRED` − valid remote certificate is required,

    :param ssl_keyfile: (optional) a path to SSL key file to identify
     local (client) party,
    :param ssl_certfile: (optional) a path to ssl certificate file
     to identify local (client) party,
    :param ssl_ca_certfile: (optional) a path to a trusted certificate
     or a certificate chain. Required to check the validity of the remote
     (server-side) certificate,
    :param username: (optional) user name to authenticate to Ignite
     cluster,
    :param password: (optional) password to authenticate to Ignite cluster.
    """
    self.prefetch = prefetch
    self._check_kwargs(kwargs)
    self.timeout = kwargs.pop('timeout', None)
    self.username = kwargs.pop('username', None)
    self.password = kwargs.pop('password', None)
    # credentials imply SSL, unless the caller decided explicitly
    if self.username and self.password and 'use_ssl' not in kwargs:
        kwargs['use_ssl'] = True
    self.init_kwargs = kwargs
+
    # bind module-level helpers as methods: `read_response` parses the
    # handshake response, `_wrap` applies optional SSL wrapping
    read_response = read_response
    _wrap = wrap
+
@property
def socket(self) -> socket.socket:
    """
    Network socket. Reconnects lazily: if there is no live socket,
    `_reconnect` is invoked to obtain one from the `nodes` iterable.
    """
    if self._socket is None:
        self._reconnect()
    return self._socket
+
def __repr__(self) -> str:
    """Show the connected node as host:port, or a disconnected marker."""
    if not (self.host and self.port):
        return '<not connected>'
    return '{}:{}'.format(self.host, self.port)
+
def _connect(self, host: str, port: int):
    """
    Actually connect socket: open a TCP connection, optionally wrap it
    in SSL, and perform the Ignite binary protocol handshake.

    :param host: Ignite node host,
    :param port: Ignite node port,
    :raise: HandshakeError if the server rejects the handshake.
    """
    self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self._socket.settimeout(self.timeout)
    # `self.socket` returns the socket just created above; `_wrap`
    # applies SSL when configured in `init_kwargs`
    self._socket = self._wrap(self.socket)
    self._socket.connect((host, port))

    hs_request = HandshakeRequest(self.username, self.password)
    self.send(hs_request)
    hs_response = self.read_response()
    # op_code 0 in the response means the handshake was rejected
    if hs_response['op_code'] == 0:
        self.close()
        error_text = 'Handshake error: {}'.format(hs_response['message'])
        # if handshake fails for any reason other than protocol mismatch
        # (i.e. authentication error), server version is 0.0.0
        if any([
            hs_response['version_major'],
            hs_response['version_minor'],
            hs_response['version_patch'],
        ]):
            error_text += (
                ' Server expects binary protocol version '
                '{version_major}.{version_minor}.{version_patch}. Client '
                'provides {client_major}.{client_minor}.{client_patch}.'
            ).format(
                client_major=PROTOCOL_VERSION_MAJOR,
                client_minor=PROTOCOL_VERSION_MINOR,
                client_patch=PROTOCOL_VERSION_PATCH,
                **hs_response
            )
        raise HandshakeError(error_text)
    # remember the node only after a successful handshake
    self.host, self.port = host, port
+
def connect(self, *args):
    """
    Connect to the server. Connection parameters may be either one node
    (host and port), or an iterable of (host, port) tuples.

    :param host: Ignite server host,
    :param port: Ignite server port,
    :param nodes: iterable of (host, port) tuples.
    """
    self.nodes = iter([])
    if (
        len(args) == 2
        and isinstance(args[0], str)
        and isinstance(args[1], int)
    ):
        # explicit single node
        host, port = args
    elif len(args) == 1 and is_iterable(args[0]):
        # a collection of nodes: take the first, keep the rest
        # for failover
        self.nodes = iter(args[0])
        host, port = next(self.nodes)
    elif not args:
        host, port = IGNITE_DEFAULT_HOST, IGNITE_DEFAULT_PORT
    else:
        raise ConnectionError('Connection parameters are not valid.')

    self._connect(host, port)
+
def _reconnect(self):
    """
    Restore the connection using the next node in `nodes` iterable.
    Gives up and resets the connection state when the iterable
    is exhausted.
    """
    for host, port in self.nodes:
        try:
            self._connect(host, port)
        except OSError:
            # this node is unreachable — try the next one
            continue
        else:
            return
    self.host = self.port = self.nodes = None
    # exception chaining gives a misleading traceback here
    raise ReconnectError('Can not reconnect: out of nodes') from None
+
def _transfer_params(self, to: 'Connection'):
    """
    Transfer non-SSL parameters to target connection object.

    :param to: connection object to transfer parameters to.
    """
    to.username, to.password = self.username, self.password
    to.nodes = self.nodes
+
def clone(self, prefetch: bytes=b'') -> 'Connection':
    """
    Clones this connection in its current state.

    :param prefetch: (optional) initial read-ahead buffer for the clone,
    :return: `Connection` object.
    """
    copy = self.__class__(**self.init_kwargs)
    self._transfer_params(to=copy)
    # reconnect the clone to the same node, if we are connected
    if self.host and self.port:
        copy._connect(self.host, self.port)
    copy.prefetch = prefetch
    return copy
+
def send(self, data: bytes, flags=None):
    """
    Send data down the socket, retrying until the whole payload is
    written or an error occurs.

    :param data: bytes to send,
    :param flags: (optional) OS-specific flags,
    :raise: SocketError if the connection is broken mid-send.
    """
    kwargs = {}
    if flags is not None:
        kwargs['flags'] = flags
    data = bytes(data)
    # a memoryview slice is zero-copy; the previous `data[sent:]`
    # slicing copied the remaining payload on every partial send,
    # making large sends O(n²)
    view = memoryview(data)
    total_bytes_sent = 0

    while total_bytes_sent < len(data):
        try:
            bytes_sent = self.socket.send(
                view[total_bytes_sent:], **kwargs
            )
        except OSError:
            # drop the connection state so the next access reconnects
            self._socket = self.host = self.port = None
            raise
        if bytes_sent == 0:
            self.socket.close()
            raise SocketError('Socket connection broken.')
        total_bytes_sent += bytes_sent
+
def recv(self, buffersize, flags=None) -> bytes:
    """
    Receive data from socket or read-ahead buffer.

    :param buffersize: bytes to receive,
    :param flags: (optional) OS-specific flags,
    :return: data received.
    """
    pref_size = len(self.prefetch)
    if buffersize > pref_size:
        # buffered data is not enough: drain the buffer and read
        # the remainder from the socket
        result = self.prefetch
        self.prefetch = b''
        try:
            result += self._recv(buffersize-pref_size, flags)
        except (SocketError, OSError):
            # drop the connection state so the next access reconnects
            self._socket = self.host = self.port = None
            raise
        return result
    else:
        # the read-ahead buffer alone satisfies the request
        result = self.prefetch[:buffersize]
        self.prefetch = self.prefetch[buffersize:]
        return result
+
def _recv(self, buffersize, flags=None) -> bytes:
    """
    Read exactly `buffersize` bytes from the socket, accumulating
    partial reads.

    :raise: SocketError when the peer closes the connection early.
    """
    kwargs = {}
    if flags is not None:
        kwargs['flags'] = flags
    received = bytearray()

    while len(received) < buffersize:
        chunk = self.socket.recv(buffersize - len(received), **kwargs)
        if not chunk:
            # an empty read means the remote side closed the connection
            self.socket.close()
            raise SocketError('Socket connection broken.')
        received += chunk

    return bytes(received)
+
def close(self):
    """
    Mark socket closed. This is recommended but not required, since
    sockets are automatically closed when they are garbage-collected.
    Safe to call on a connection that was never established or is
    already closed.
    """
    if self._socket is not None:
        try:
            # shutdown raises OSError if the peer already dropped
            # the connection; closing is best-effort
            self._socket.shutdown(socket.SHUT_RDWR)
        except OSError:
            pass
        self._socket.close()
    self._socket = self.host = self.port = None
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/connection/generators.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/connection/generators.py b/modules/platforms/python/pyignite/connection/generators.py
new file mode 100644
index 0000000..d76db0e
--- /dev/null
+++ b/modules/platforms/python/pyignite/connection/generators.py
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
class RoundRobin:
    """
    Round-robin generator for use with `Client.connect()`. Yields the
    given nodes one by one, wrapping around the end of the list; stops
    after `max_reconnects` yields, if that limit is set.
    """

    def __init__(self, nodes: list, max_reconnects: int=None):
        """
        :param nodes: list of two-tuples of (host, port) format,
        :param max_reconnects: (optional) maximum number of reconnect
         attempts. Defaults to None (cycle nodes infinitely).
        """
        self.nodes = nodes
        self.max_reconnects = max_reconnects
        self.node_index = 0
        self.reconnects = 0

    def __iter__(self) -> 'RoundRobin':
        return self

    def __next__(self) -> tuple:
        limit = self.max_reconnects
        if limit is not None:
            if self.reconnects >= limit:
                raise StopIteration
            self.reconnects += 1

        # wrap around the end of the node list
        if self.node_index >= len(self.nodes):
            self.node_index = 0
        chosen = self.nodes[self.node_index]
        self.node_index += 1
        return chosen
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/connection/handshake.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/connection/handshake.py b/modules/platforms/python/pyignite/connection/handshake.py
new file mode 100644
index 0000000..13d57fe
--- /dev/null
+++ b/modules/platforms/python/pyignite/connection/handshake.py
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional
+
+from pyignite.constants import *
+from pyignite.datatypes import Byte, Int, Short, String
+from pyignite.datatypes.internal import Struct
+
+OP_HANDSHAKE = 1
+
+
class HandshakeRequest:
    """
    Ignite binary protocol handshake request. Carries the protocol
    version and client code, plus optional credentials when both
    username and password are given.
    """
    handshake_struct = None
    username = None
    password = None

    def __init__(
        self, username: Optional[str]=None, password: Optional[str]=None
    ):
        request_fields = [
            ('length', Int),
            ('op_code', Byte),
            ('version_major', Short),
            ('version_minor', Short),
            ('version_patch', Short),
            ('client_code', Byte),
        ]
        # credentials are included only when both parts are supplied
        if username and password:
            self.username, self.password = username, password
            request_fields += [
                ('username', String),
                ('password', String),
            ]
        self.handshake_struct = Struct(request_fields)

    def __bytes__(self) -> bytes:
        values = {
            'length': 8,
            'op_code': OP_HANDSHAKE,
            'version_major': PROTOCOL_VERSION_MAJOR,
            'version_minor': PROTOCOL_VERSION_MINOR,
            'version_patch': PROTOCOL_VERSION_PATCH,
            'client_code': 2,  # fixed value defined by protocol
        }
        if self.username and self.password:
            values['username'] = self.username
            values['password'] = self.password
            # each `String` header takes 5 bytes
            values['length'] += 10 + len(self.username) + len(self.password)
        return self.handshake_struct.from_python(values)
+
+
def read_response(client):
    """
    Parse a handshake response from the server.

    Reads the fixed (length, op_code) prefix first; when op_code is 0
    (handshake rejected), also reads the server protocol version and
    the error message.

    :param client: connection object to read from,
    :return: dict of parsed response fields.
    """
    response_start = Struct([
        ('length', Int),
        ('op_code', Byte),
    ])
    start_class, start_buffer = response_start.parse(client)
    start = start_class.from_buffer_copy(start_buffer)
    data = response_start.to_python(start)
    if data['op_code'] == 0:
        response_end = Struct([
            ('version_major', Short),
            ('version_minor', Short),
            ('version_patch', Short),
            ('message', String),
        ])
        end_class, end_buffer = response_end.parse(client)
        end = end_class.from_buffer_copy(end_buffer)
        data.update(response_end.to_python(end))
    return data
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/connection/ssl.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/connection/ssl.py b/modules/platforms/python/pyignite/connection/ssl.py
new file mode 100644
index 0000000..548ca7f
--- /dev/null
+++ b/modules/platforms/python/pyignite/connection/ssl.py
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ssl
+
+from pyignite.constants import *
+
+
def wrap(client, _socket):
    """
    Wrap socket in SSL wrapper, when the connection was configured with
    `use_ssl`; otherwise return the socket untouched.
    """
    opts = client.init_kwargs
    if not opts.get('use_ssl', None):
        return _socket
    return ssl.wrap_socket(
        _socket,
        ssl_version=opts.get('ssl_version', SSL_DEFAULT_VERSION),
        ciphers=opts.get('ssl_ciphers', SSL_DEFAULT_CIPHERS),
        cert_reqs=opts.get('ssl_cert_reqs', ssl.CERT_NONE),
        keyfile=opts.get('ssl_keyfile', None),
        certfile=opts.get('ssl_certfile', None),
        ca_certs=opts.get('ssl_ca_certfile', None),
    )
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/constants.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/constants.py b/modules/platforms/python/pyignite/constants.py
new file mode 100644
index 0000000..78c9379
--- /dev/null
+++ b/modules/platforms/python/pyignite/constants.py
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains some constants, used internally throughout the API.
+"""
+
+import ssl
+
+
+__all__ = [
+ 'PROTOCOL_VERSION_MAJOR', 'PROTOCOL_VERSION_MINOR',
+ 'PROTOCOL_VERSION_PATCH', 'MAX_LONG', 'MIN_LONG', 'MAX_INT', 'MIN_INT',
+ 'PROTOCOL_BYTE_ORDER', 'PROTOCOL_STRING_ENCODING',
+ 'PROTOCOL_CHAR_ENCODING', 'SSL_DEFAULT_VERSION', 'SSL_DEFAULT_CIPHERS',
+ 'FNV1_OFFSET_BASIS', 'FNV1_PRIME',
+ 'IGNITE_DEFAULT_HOST', 'IGNITE_DEFAULT_PORT',
+]
+
+# Binary client protocol version supported by this client (1.2.0).
+PROTOCOL_VERSION_MAJOR = 1
+PROTOCOL_VERSION_MINOR = 2
+PROTOCOL_VERSION_PATCH = 0
+
+# Value ranges of signed 64-bit and 32-bit integers (2**63-1, -2**63,
+# 2**31-1, -2**31) — the bounds of the protocol's `long` and `int` types.
+MAX_LONG = 9223372036854775807
+MIN_LONG = -9223372036854775808
+MAX_INT = 2147483647
+MIN_INT = -2147483648
+
+# Byte order and text encodings used when packing/unpacking protocol data.
+PROTOCOL_BYTE_ORDER = 'little'
+PROTOCOL_STRING_ENCODING = 'utf-8'
+PROTOCOL_CHAR_ENCODING = 'utf-16le'
+
+# NOTE(review): TLS 1.1 is obsolete, and `ssl._DEFAULT_CIPHERS` is a
+# private CPython attribute (absent in newer Python versions) — consider
+# a negotiated protocol (ssl.PROTOCOL_TLS) and a public cipher default.
+SSL_DEFAULT_VERSION = ssl.PROTOCOL_TLSv1_1
+SSL_DEFAULT_CIPHERS = ssl._DEFAULT_CIPHERS
+
+# Parameters of the 32-bit FNV-1 hash (offset basis and prime).
+FNV1_OFFSET_BASIS = 0x811c9dc5
+FNV1_PRIME = 0x01000193
+
+# Node address used when the caller supplies no host/port.
+IGNITE_DEFAULT_HOST = 'localhost'
+IGNITE_DEFAULT_PORT = 10800
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/__init__.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/__init__.py b/modules/platforms/python/pyignite/datatypes/__init__.py
new file mode 100644
index 0000000..5024f79
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/__init__.py
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains classes, used internally by `pyignite` for parsing and
+creating binary data.
+"""
+
+from .complex import *
+from .internal import *
+from .null_object import *
+from .primitive import *
+from .primitive_arrays import *
+from .primitive_objects import *
+from .standard import *
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/binary.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/binary.py b/modules/platforms/python/pyignite/datatypes/binary.py
new file mode 100644
index 0000000..c2344da
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/binary.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyignite.datatypes import Int, Bool, String, Struct, StructArray
+
+
+# Descriptors of the fields of one binary type: field name plus numeric
+# type and field ids.
+binary_fields_struct = StructArray([
+ ('field_name', String),
+ ('type_id', Int),
+ ('field_id', Int),
+])
+
+# Binary type metadata: ids and names, its field descriptors, and a flag
+# marking enum types. NOTE(review): presumably mirrors the binary-type
+# message layout handled in pyignite.api.binary — confirm against the
+# protocol specification before reordering.
+body_struct = Struct([
+ ('type_id', Int),
+ ('type_name', String),
+ ('affinity_key_field', String),
+ ('binary_fields', binary_fields_struct),
+ ('is_enum', Bool),
+])
+
+# One (literal, int) pair per member of a binary enum type.
+# NOTE(review): the second field is named 'type_id' though it presumably
+# carries the member's ordinal value — verify.
+enum_struct = StructArray([
+ ('literal', String),
+ ('type_id', Int),
+])
+
+# Ids of the fields participating in one schema.
+schema_fields_struct = StructArray([
+ ('schema_field_id', Int),
+])
+
+# Schemas of a binary type: schema id plus the ids of its fields.
+schema_struct = StructArray([
+ ('schema_id', Int),
+ ('schema_fields', schema_fields_struct),
+])
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/cache_config.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/cache_config.py b/modules/platforms/python/pyignite/datatypes/cache_config.py
new file mode 100644
index 0000000..67b353d
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/cache_config.py
@@ -0,0 +1,153 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .standard import String
+from .internal import AnyDataObject, Struct, StructArray
+from .primitive import *
+
+
# Public names of this module. 'CacheAtomicityMode' is included because it
# is part of the public surface: sibling modules (e.g. cache_properties)
# import it from here alongside the other mode enums.
__all__ = [
    'cache_config_struct', 'CacheMode', 'PartitionLossPolicy',
    'RebalanceMode', 'WriteSynchronizationMode', 'IndexType',
    'CacheAtomicityMode',
]
+
+
+class CacheMode(Int):
+ """ Cache mode values; serialized as a protocol int. """
+ LOCAL = 0
+ REPLICATED = 1
+ PARTITIONED = 2
+
+
+class PartitionLossPolicy(Int):
+ """ Behavior when a cache partition is lost; protocol int. """
+ READ_ONLY_SAFE = 0
+ READ_ONLY_ALL = 1
+ READ_WRITE_SAFE = 2
+ READ_WRITE_ALL = 3
+ IGNORE = 4
+
+
+class RebalanceMode(Int):
+ """ Cache rebalance mode values; protocol int. """
+ SYNC = 0
+ ASYNC = 1
+ NONE = 2
+
+
+class WriteSynchronizationMode(Int):
+ """ Write synchronization mode values; protocol int. """
+ FULL_SYNC = 0
+ FULL_ASYNC = 1
+ PRIMARY_SYNC = 2
+
+
+class IndexType(Byte):
+ """ Query index kinds. Serialized as a single byte (note the Byte
+ base class, unlike the other enums in this module). """
+ SORTED = 0
+ FULLTEXT = 1
+ GEOSPATIAL = 2
+
+
+class CacheAtomicityMode(Int):
+ """ Cache atomicity mode values; protocol int. """
+ TRANSACTIONAL = 0
+ ATOMIC = 1
+
+
+# Descriptors of the queryable fields of a query entity. The `defaults`
+# mapping presumably supplies values for keys the caller omits when
+# serializing — see StructArray in .internal; confirm.
+QueryFields = StructArray([
+ ('name', String),
+ ('type_name', String),
+ ('is_key_field', Bool),
+ ('is_notnull_constraint_field', Bool),
+ ('default_value', AnyDataObject),
+ ('precision', Int),
+ ('scale', Int),
+], defaults={
+ 'is_key_field': False,
+ 'is_notnull_constraint_field': False,
+ 'default_value': None,
+ 'precision': -1,
+ 'scale': -1,
+})
+
+
+# (field name, SQL alias) pairs of a query entity.
+FieldNameAliases = StructArray([
+ ('field_name', String),
+ ('alias', String),
+])
+
+
+# Fields of one query index, each with its sort direction.
+Fields = StructArray([
+ ('name', String),
+ ('is_descending', Bool),
+], defaults={
+ 'is_descending': False,
+})
+
+
+# Secondary index descriptors of a query entity.
+QueryIndexes = StructArray([
+ ('index_name', String),
+ ('index_type', IndexType),
+ ('inline_size', Int),
+ ('fields', Fields),
+])
+
+
+# SQL-queryable entity: key/value types, table name, fields, aliases and
+# indexes.
+QueryEntities = StructArray([
+ ('key_type_name', String),
+ ('value_type_name', String),
+ ('table_name', String),
+ ('key_field_name', String),
+ ('value_field_name', String),
+ ('query_fields', QueryFields),
+ ('field_name_aliases', FieldNameAliases),
+ ('query_indexes', QueryIndexes),
+])
+
+
+# Binds a type name to the field used as its affinity key.
+CacheKeyConfiguration = StructArray([
+ ('type_name', String),
+ ('affinity_key_field_name', String),
+])
+
+
+# Complete cache configuration as parsed from a server response, starting
+# with the configuration's byte length. NOTE(review): field order is
+# significant — it presumably mirrors the order of values on the wire, so
+# do not reorder without checking the protocol spec. Also, `invalidate`
+# is typed Int although the name reads like a boolean flag — confirm.
+cache_config_struct = Struct([
+ ('length', Int),
+ ('backups_number', Int),
+ ('cache_mode', CacheMode),
+ ('cache_atomicity_mode', CacheAtomicityMode),
+ ('copy_on_read', Bool),
+ ('data_region_name', String),
+ ('eager_ttl', Bool),
+ ('statistics_enabled', Bool),
+ ('group_name', String),
+ ('invalidate', Int),
+ ('default_lock_timeout', Long),
+ ('max_query_iterators', Int),
+ ('name', String),
+ ('is_onheap_cache_enabled', Bool),
+ ('partition_loss_policy', PartitionLossPolicy),
+ ('query_detail_metric_size', Int),
+ ('query_parallelism', Int),
+ ('read_from_backup', Bool),
+ ('rebalance_batch_size', Int),
+ ('rebalance_batches_prefetch_count', Long),
+ ('rebalance_delay', Long),
+ ('rebalance_mode', RebalanceMode),
+ ('rebalance_order', Int),
+ ('rebalance_throttle', Long),
+ ('rebalance_timeout', Long),
+ ('sql_escape_all', Bool),
+ ('sql_index_inline_max_size', Int),
+ ('sql_schema', String),
+ ('write_synchronization_mode', WriteSynchronizationMode),
+ ('cache_key_configuration', CacheKeyConfiguration),
+ ('query_entities', QueryEntities),
+])
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/pyignite/datatypes/cache_properties.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/pyignite/datatypes/cache_properties.py b/modules/platforms/python/pyignite/datatypes/cache_properties.py
new file mode 100644
index 0000000..e94db5f
--- /dev/null
+++ b/modules/platforms/python/pyignite/datatypes/cache_properties.py
@@ -0,0 +1,287 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+
+from .prop_codes import *
+from .cache_config import (
+ CacheMode, CacheAtomicityMode, PartitionLossPolicy, RebalanceMode,
+ WriteSynchronizationMode, QueryEntities, CacheKeyConfiguration,
+)
+from .primitive import *
+from .standard import *
+
+
+__all__ = [
+ 'PropName', 'PropCacheMode', 'PropCacheAtomicityMode', 'PropBackupsNumber',
+ 'PropWriteSynchronizationMode', 'PropCopyOnRead', 'PropReadFromBackup',
+ 'PropDataRegionName', 'PropIsOnheapCacheEnabled', 'PropQueryEntities',
+ 'PropQueryParallelism', 'PropQueryDetailMetricSize', 'PropSQLSchema',
+ 'PropSQLIndexInlineMaxSize', 'PropSqlEscapeAll', 'PropMaxQueryIterators',
+ 'PropRebalanceMode', 'PropRebalanceDelay', 'PropRebalanceTimeout',
+ 'PropRebalanceBatchSize', 'PropRebalanceBatchesPrefetchCount',
+ 'PropRebalanceOrder', 'PropRebalanceThrottle', 'PropGroupName',
+ 'PropCacheKeyConfiguration', 'PropDefaultLockTimeout',
+ 'PropMaxConcurrentAsyncOperation', 'PropPartitionLossPolicy',
+ 'PropEagerTTL', 'PropStatisticsEnabled', 'prop_map', 'AnyProperty',
+]
+
+
def prop_map(code: int):
    """
    Map a cache configuration property code onto the property class that
    (de)serializes it.

    Implemented as a function rather than a module-level dict because the
    ``Prop*`` classes it refers to are defined further down this module.

    :param code: one of the ``PROP_*`` codes from `.prop_codes`,
    :return: the matching :class:`PropBase` subclass,
    :raise KeyError: on an unknown property code.
    """
    return {
        PROP_NAME: PropName,
        PROP_CACHE_MODE: PropCacheMode,
        PROP_CACHE_ATOMICITY_MODE: PropCacheAtomicityMode,
        PROP_BACKUPS_NUMBER: PropBackupsNumber,
        PROP_WRITE_SYNCHRONIZATION_MODE: PropWriteSynchronizationMode,
        PROP_COPY_ON_READ: PropCopyOnRead,
        PROP_READ_FROM_BACKUP: PropReadFromBackup,
        PROP_DATA_REGION_NAME: PropDataRegionName,
        PROP_IS_ONHEAP_CACHE_ENABLED: PropIsOnheapCacheEnabled,
        PROP_QUERY_ENTITIES: PropQueryEntities,
        PROP_QUERY_PARALLELISM: PropQueryParallelism,
        PROP_QUERY_DETAIL_METRIC_SIZE: PropQueryDetailMetricSize,
        PROP_SQL_SCHEMA: PropSQLSchema,
        PROP_SQL_INDEX_INLINE_MAX_SIZE: PropSQLIndexInlineMaxSize,
        PROP_SQL_ESCAPE_ALL: PropSqlEscapeAll,
        PROP_MAX_QUERY_ITERATORS: PropMaxQueryIterators,
        PROP_REBALANCE_MODE: PropRebalanceMode,
        PROP_REBALANCE_DELAY: PropRebalanceDelay,
        PROP_REBALANCE_TIMEOUT: PropRebalanceTimeout,
        PROP_REBALANCE_BATCH_SIZE: PropRebalanceBatchSize,
        PROP_REBALANCE_BATCHES_PREFETCH_COUNT: PropRebalanceBatchesPrefetchCount,
        PROP_REBALANCE_ORDER: PropRebalanceOrder,
        PROP_REBALANCE_THROTTLE: PropRebalanceThrottle,
        PROP_GROUP_NAME: PropGroupName,
        PROP_CACHE_KEY_CONFIGURATION: PropCacheKeyConfiguration,
        PROP_DEFAULT_LOCK_TIMEOUT: PropDefaultLockTimeout,
        PROP_MAX_CONCURRENT_ASYNC_OPERATIONS: PropMaxConcurrentAsyncOperation,
        # BUG FIX: previously mapped to the raw value class
        # PartitionLossPolicy; every other entry maps to its Prop* wrapper,
        # and AnyProperty.to_python relies on getting a PropBase subclass.
        PROP_PARTITION_LOSS_POLICY: PropPartitionLossPolicy,
        PROP_EAGER_TTL: PropEagerTTL,
        PROP_STATISTICS_ENABLED: PropStatisticsEnabled,
    }[code]
+
+
+class PropBase:
+ """
+ Base class for cache configuration properties.
+
+ On the wire a property is a 2-byte little-endian property code
+ followed by a payload whose format is defined by `prop_data_class`
+ (a parser/generator class from pyignite.datatypes). Subclasses only
+ set `prop_code` and `prop_data_class`.
+ """
+ # protocol code of the property (a PROP_* constant); set by subclasses
+ prop_code = None
+ # parser/generator class of the property's payload; set by subclasses
+ prop_data_class = None
+
+ @classmethod
+ def build_header(cls):
+ # Synthesize a packed ctypes structure holding just the 2-byte
+ # little-endian property code.
+ return type(
+ cls.__name__+'Header',
+ (ctypes.LittleEndianStructure,),
+ {
+ '_pack_': 1,
+ '_fields_': [
+ ('prop_code', ctypes.c_short),
+ ],
+ }
+ )
+
+ @classmethod
+ def parse(cls, connection: 'Connection'):
+ # Read the property code off the connection, then delegate payload
+ # parsing to the data class. Returns the combined ctypes class and
+ # the raw bytes consumed (header + payload).
+ header_class = cls.build_header()
+ header_buffer = connection.recv(ctypes.sizeof(header_class))
+ data_class, data_buffer = cls.prop_data_class.parse(connection)
+ prop_class = type(
+ cls.__name__,
+ (header_class,),
+ {
+ '_pack_': 1,
+ '_fields_': [
+ ('data', data_class),
+ ],
+ }
+ )
+ return prop_class, header_buffer + data_buffer
+
+ @classmethod
+ def to_python(cls, ctype_object, *args, **kwargs):
+ # Convert only the payload; the property code is implied by `cls`.
+ return cls.prop_data_class.to_python(
+ ctype_object.data, *args, **kwargs
+ )
+
+ @classmethod
+ def from_python(cls, value):
+ # Serialize: packed property code header, then the encoded payload.
+ header_class = cls.build_header()
+ header = header_class()
+ header.prop_code = cls.prop_code
+ return bytes(header) + cls.prop_data_class.from_python(value)
+
+
+# One concrete property class per cache configuration option. Each binds
+# a PROP_* code to the parser/generator class of the option's value; all
+# parse/serialize behavior lives in PropBase.
+class PropName(PropBase):
+ prop_code = PROP_NAME
+ prop_data_class = String
+
+
+class PropCacheMode(PropBase):
+ prop_code = PROP_CACHE_MODE
+ prop_data_class = CacheMode
+
+
+class PropCacheAtomicityMode(PropBase):
+ prop_code = PROP_CACHE_ATOMICITY_MODE
+ prop_data_class = CacheAtomicityMode
+
+
+class PropBackupsNumber(PropBase):
+ prop_code = PROP_BACKUPS_NUMBER
+ prop_data_class = Int
+
+
+class PropWriteSynchronizationMode(PropBase):
+ prop_code = PROP_WRITE_SYNCHRONIZATION_MODE
+ prop_data_class = WriteSynchronizationMode
+
+
+class PropCopyOnRead(PropBase):
+ prop_code = PROP_COPY_ON_READ
+ prop_data_class = Bool
+
+
+class PropReadFromBackup(PropBase):
+ prop_code = PROP_READ_FROM_BACKUP
+ prop_data_class = Bool
+
+
+class PropDataRegionName(PropBase):
+ prop_code = PROP_DATA_REGION_NAME
+ prop_data_class = String
+
+
+class PropIsOnheapCacheEnabled(PropBase):
+ prop_code = PROP_IS_ONHEAP_CACHE_ENABLED
+ prop_data_class = Bool
+
+
+class PropQueryEntities(PropBase):
+ prop_code = PROP_QUERY_ENTITIES
+ prop_data_class = QueryEntities
+
+
+class PropQueryParallelism(PropBase):
+ prop_code = PROP_QUERY_PARALLELISM
+ prop_data_class = Int
+
+
+class PropQueryDetailMetricSize(PropBase):
+ prop_code = PROP_QUERY_DETAIL_METRIC_SIZE
+ prop_data_class = Int
+
+
+class PropSQLSchema(PropBase):
+ prop_code = PROP_SQL_SCHEMA
+ prop_data_class = String
+
+
+class PropSQLIndexInlineMaxSize(PropBase):
+ prop_code = PROP_SQL_INDEX_INLINE_MAX_SIZE
+ prop_data_class = Int
+
+
+class PropSqlEscapeAll(PropBase):
+ prop_code = PROP_SQL_ESCAPE_ALL
+ prop_data_class = Bool
+
+
+class PropMaxQueryIterators(PropBase):
+ prop_code = PROP_MAX_QUERY_ITERATORS
+ prop_data_class = Int
+
+
+class PropRebalanceMode(PropBase):
+ prop_code = PROP_REBALANCE_MODE
+ prop_data_class = RebalanceMode
+
+
+class PropRebalanceDelay(PropBase):
+ prop_code = PROP_REBALANCE_DELAY
+ prop_data_class = Long
+
+
+class PropRebalanceTimeout(PropBase):
+ prop_code = PROP_REBALANCE_TIMEOUT
+ prop_data_class = Long
+
+
+class PropRebalanceBatchSize(PropBase):
+ prop_code = PROP_REBALANCE_BATCH_SIZE
+ prop_data_class = Int
+
+
+class PropRebalanceBatchesPrefetchCount(PropBase):
+ prop_code = PROP_REBALANCE_BATCHES_PREFETCH_COUNT
+ prop_data_class = Long
+
+
+class PropRebalanceOrder(PropBase):
+ prop_code = PROP_REBALANCE_ORDER
+ prop_data_class = Int
+
+
+class PropRebalanceThrottle(PropBase):
+ prop_code = PROP_REBALANCE_THROTTLE
+ prop_data_class = Long
+
+
+class PropGroupName(PropBase):
+ prop_code = PROP_GROUP_NAME
+ prop_data_class = String
+
+
+class PropCacheKeyConfiguration(PropBase):
+ prop_code = PROP_CACHE_KEY_CONFIGURATION
+ prop_data_class = CacheKeyConfiguration
+
+
+class PropDefaultLockTimeout(PropBase):
+ prop_code = PROP_DEFAULT_LOCK_TIMEOUT
+ prop_data_class = Long
+
+
+class PropMaxConcurrentAsyncOperation(PropBase):
+ prop_code = PROP_MAX_CONCURRENT_ASYNC_OPERATIONS
+ prop_data_class = Int
+
+
+class PropPartitionLossPolicy(PropBase):
+ prop_code = PROP_PARTITION_LOSS_POLICY
+ prop_data_class = PartitionLossPolicy
+
+
+class PropEagerTTL(PropBase):
+ prop_code = PROP_EAGER_TTL
+ prop_data_class = Bool
+
+
+class PropStatisticsEnabled(PropBase):
+ prop_code = PROP_STATISTICS_ENABLED
+ prop_data_class = Bool
+
+
+class AnyProperty(PropBase):
+ """
+ Converts a cache configuration property of any type: the concrete
+ property class is looked up from the property code at conversion
+ time. Serialization (from_python) is intentionally unsupported —
+ use a concrete Prop* class for that.
+ """
+
+ @classmethod
+ def from_python(cls, value):
+ raise Exception(
+ 'You must choose a certain type '
+ 'for your cache configuration property'
+ )
+
+ @classmethod
+ def to_python(cls, ctype_object, *args, **kwargs):
+ # NOTE(review): the looked-up class is a Prop* wrapper whose own
+ # to_python reads `.data` again, so the parsed object is effectively
+ # expected to nest as ctype_object.data.data — verify this matches
+ # the layout produced by the parse path.
+ prop_data_class = prop_map(ctype_object.prop_code)
+ return prop_data_class.to_python(ctype_object.data, *args, **kwargs)
[6/6] ignite git commit: IGNITE-7782 Python thin client
Posted by is...@apache.org.
IGNITE-7782 Python thin client
This closes #4278
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/7e547b13
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/7e547b13
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/7e547b13
Branch: refs/heads/master
Commit: 7e547b139143f08e707d0f084f4ce677c9ee6ec0
Parents: eeb25e6
Author: Dmitry Melnichuk <dm...@nobitlost.com>
Authored: Mon Oct 15 13:28:51 2018 +0300
Committer: Igor Sapego <is...@apache.org>
Committed: Mon Oct 15 13:28:51 2018 +0300
----------------------------------------------------------------------
modules/platforms/python/LICENSE | 202 ++++
modules/platforms/python/README.md | 75 ++
modules/platforms/python/docs/Makefile | 20 +
modules/platforms/python/docs/conf.py | 176 ++++
.../python/docs/datatypes/cache_props.rst | 163 +++
.../platforms/python/docs/datatypes/parsers.rst | 175 ++++
modules/platforms/python/docs/examples.rst | 624 ++++++++++++
modules/platforms/python/docs/index.rst | 33 +
modules/platforms/python/docs/modules.rst | 31 +
modules/platforms/python/docs/readme.rst | 202 ++++
.../platforms/python/docs/source/modules.rst | 7 +
.../python/docs/source/pyignite.api.binary.rst | 7 +
.../docs/source/pyignite.api.cache_config.rst | 7 +
.../docs/source/pyignite.api.key_value.rst | 7 +
.../python/docs/source/pyignite.api.result.rst | 7 +
.../python/docs/source/pyignite.api.rst | 19 +
.../python/docs/source/pyignite.api.sql.rst | 7 +
.../python/docs/source/pyignite.binary.rst | 7 +
.../python/docs/source/pyignite.cache.rst | 7 +
.../python/docs/source/pyignite.client.rst | 7 +
.../source/pyignite.connection.generators.rst | 7 +
.../source/pyignite.connection.handshake.rst | 7 +
.../python/docs/source/pyignite.connection.rst | 17 +
.../docs/source/pyignite.connection.ssl.rst | 7 +
.../python/docs/source/pyignite.constants.rst | 7 +
.../docs/source/pyignite.datatypes.binary.rst | 7 +
.../source/pyignite.datatypes.cache_config.rst | 7 +
.../pyignite.datatypes.cache_properties.rst | 7 +
.../docs/source/pyignite.datatypes.complex.rst | 7 +
.../docs/source/pyignite.datatypes.internal.rst | 7 +
.../source/pyignite.datatypes.key_value.rst | 7 +
.../source/pyignite.datatypes.null_object.rst | 7 +
.../source/pyignite.datatypes.primitive.rst | 7 +
.../pyignite.datatypes.primitive_arrays.rst | 7 +
.../pyignite.datatypes.primitive_objects.rst | 7 +
.../source/pyignite.datatypes.prop_codes.rst | 7 +
.../python/docs/source/pyignite.datatypes.rst | 28 +
.../docs/source/pyignite.datatypes.sql.rst | 7 +
.../docs/source/pyignite.datatypes.standard.rst | 7 +
.../source/pyignite.datatypes.type_codes.rst | 7 +
.../python/docs/source/pyignite.exceptions.rst | 7 +
.../docs/source/pyignite.queries.op_codes.rst | 7 +
.../python/docs/source/pyignite.queries.rst | 15 +
.../platforms/python/docs/source/pyignite.rst | 30 +
.../python/docs/source/pyignite.utils.rst | 7 +
.../platforms/python/examples/binary_basics.py | 53 +
.../platforms/python/examples/create_binary.py | 103 ++
modules/platforms/python/examples/failover.py | 61 ++
.../platforms/python/examples/get_and_put.py | 41 +
.../platforms/python/examples/migrate_binary.py | 190 ++++
.../platforms/python/examples/read_binary.py | 275 +++++
modules/platforms/python/examples/readme.md | 17 +
modules/platforms/python/examples/scans.py | 55 +
modules/platforms/python/examples/sql.py | 298 ++++++
modules/platforms/python/examples/type_hints.py | 51 +
modules/platforms/python/pyignite/__init__.py | 17 +
.../platforms/python/pyignite/api/__init__.py | 71 ++
modules/platforms/python/pyignite/api/binary.py | 209 ++++
.../python/pyignite/api/cache_config.py | 279 ++++++
.../platforms/python/pyignite/api/key_value.py | 995 +++++++++++++++++++
modules/platforms/python/pyignite/api/result.py | 38 +
modules/platforms/python/pyignite/api/sql.py | 478 +++++++++
modules/platforms/python/pyignite/binary.py | 136 +++
modules/platforms/python/pyignite/cache.py | 595 +++++++++++
modules/platforms/python/pyignite/client.py | 406 ++++++++
.../python/pyignite/connection/__init__.py | 329 ++++++
.../python/pyignite/connection/generators.py | 48 +
.../python/pyignite/connection/handshake.py | 91 ++
.../platforms/python/pyignite/connection/ssl.py | 39 +
modules/platforms/python/pyignite/constants.py | 52 +
.../python/pyignite/datatypes/__init__.py | 27 +
.../python/pyignite/datatypes/binary.py | 45 +
.../python/pyignite/datatypes/cache_config.py | 153 +++
.../pyignite/datatypes/cache_properties.py | 287 ++++++
.../python/pyignite/datatypes/complex.py | 531 ++++++++++
.../python/pyignite/datatypes/internal.py | 461 +++++++++
.../python/pyignite/datatypes/key_value.py | 24 +
.../python/pyignite/datatypes/null_object.py | 63 ++
.../python/pyignite/datatypes/primitive.py | 105 ++
.../pyignite/datatypes/primitive_arrays.py | 207 ++++
.../pyignite/datatypes/primitive_objects.py | 157 +++
.../python/pyignite/datatypes/prop_codes.py | 51 +
.../platforms/python/pyignite/datatypes/sql.py | 23 +
.../python/pyignite/datatypes/standard.py | 713 +++++++++++++
.../python/pyignite/datatypes/type_codes.py | 57 ++
modules/platforms/python/pyignite/exceptions.py | 80 ++
.../python/pyignite/queries/__init__.py | 339 +++++++
.../python/pyignite/queries/op_codes.py | 65 ++
modules/platforms/python/pyignite/utils.py | 168 ++++
modules/platforms/python/requirements/docs.txt | 6 +
.../platforms/python/requirements/install.txt | 4 +
modules/platforms/python/requirements/setup.txt | 3 +
modules/platforms/python/requirements/tests.txt | 5 +
modules/platforms/python/setup.py | 100 ++
modules/platforms/python/tests/conftest.py | 218 ++++
modules/platforms/python/tests/test_binary.py | 281 ++++++
.../platforms/python/tests/test_cache_class.py | 221 ++++
.../python/tests/test_cache_class_sql.py | 103 ++
.../platforms/python/tests/test_cache_config.py | 75 ++
.../platforms/python/tests/test_datatypes.py | 134 +++
modules/platforms/python/tests/test_examples.py | 57 ++
.../python/tests/test_generic_object.py | 33 +
.../platforms/python/tests/test_get_names.py | 30 +
.../platforms/python/tests/test_handshake.py | 63 ++
.../platforms/python/tests/test_key_value.py | 327 ++++++
modules/platforms/python/tests/test_scan.py | 66 ++
modules/platforms/python/tests/test_sql.py | 154 +++
parent/pom.xml | 5 +
108 files changed, 12388 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/LICENSE
----------------------------------------------------------------------
diff --git a/modules/platforms/python/LICENSE b/modules/platforms/python/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/modules/platforms/python/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/README.md
----------------------------------------------------------------------
diff --git a/modules/platforms/python/README.md b/modules/platforms/python/README.md
new file mode 100644
index 0000000..22732ce
--- /dev/null
+++ b/modules/platforms/python/README.md
@@ -0,0 +1,75 @@
+# ignite-python-client
+Apache Ignite thin (binary protocol) client, written in Python 3.
+
+## Prerequisites
+
+- Python 3.4 or above (3.6 is tested),
+- Access to Apache Ignite node, local or remote. The current thin client
+ version was tested on Apache Ignite 2.7.0 (binary client protocol 1.2.0).
+
+## Installation
+
+#### *for end user*
+If you only want to use the `pyignite` module in your project, do:
+```
+$ pip install pyignite
+```
+
+#### *for developer*
+If you want to run tests, examples or build documentation, clone
+the whole repository:
+```
+$ git clone git@github.com:apache/ignite.git
+$ cd ignite/modules/platforms/python
+$ pip install -e .
+```
+
+This will install the repository version of `pyignite` into your environment
+in so-called “develop” or “editable” mode. You may read more about
+[editable installs](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs)
+in the `pip` manual.
+
+Then run through the contents of `requirements` folder to install
+the additional requirements into your working Python environment using
+```
+$ pip install -r requirements/<your task>.txt
+```
+
+You may also want to consult the `setuptools` manual about using `setup.py`.
+
+## Documentation
+[The package documentation](https://apache-ignite-binary-protocol-client.readthedocs.io)
+is available at *RTD* for your convenience.
+
+If you want to build the documentation from source, do the developer
+installation as described above, then run the following commands:
+```
+$ cd ignite/modules/platforms/python
+$ pip install -r requirements/docs.txt
+$ cd docs
+$ make html
+```
+
+Then open `ignite/modules/platforms/python/docs/generated/html/index.html`
+in your browser.
+
+## Examples
+Some examples of using pyignite are provided in
+`ignite/modules/platforms/python/examples` folder. They are extensively
+commented in the
+“[Examples of usage](https://apache-ignite-binary-protocol-client.readthedocs.io/en/latest/examples.html)”
+section of the documentation.
+
+This code implies that it is run in the environment with `pyignite` package
+installed, and Apache Ignite node is running on localhost:10800.
+
+## Testing
+Run
+```
+$ cd ignite/modules/platforms/python
+$ python setup.py pytest
+```
+
+*NB!* All tests require Apache Ignite node running on localhost:10800.
+If you need to change the connection parameters, see the documentation on
+[testing](https://apache-ignite-binary-protocol-client.readthedocs.io/en/latest/readme.html#testing).
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/Makefile
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/Makefile b/modules/platforms/python/docs/Makefile
new file mode 100644
index 0000000..e80dcf6
--- /dev/null
+++ b/modules/platforms/python/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+SPHINXPROJ = ApacheIgnitebinaryclientPythonAPI
+SOURCEDIR = .
+BUILDDIR = generated
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/conf.py
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/conf.py b/modules/platforms/python/docs/conf.py
new file mode 100644
index 0000000..8c498aa
--- /dev/null
+++ b/modules/platforms/python/docs/conf.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+#
+# Configuration file for the Sphinx documentation builder.
+#
+# This file does only contain a selection of the most common options. For a
+# full list see the documentation:
+# http://www.sphinx-doc.org/en/master/config
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+sys.path.insert(0, os.path.abspath('../'))
+
+
+# -- Project information -----------------------------------------------------
+
+project = 'Apache Ignite binary client Python API'
+copyright = '2018, Apache Software Foundation (ASF)'
+author = 'Dmitry Melnichuk'
+
+# The short X.Y version
+version = ''
+# The full version, including alpha/beta/rc tags
+release = '0.1.0'
+
+
+# -- General configuration ---------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.todo',
+ 'sphinxcontrib.fulltoc',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path .
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = []
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# The default sidebars (for documents that don't match any pattern) are
+# defined by theme itself. Builtin themes are using these templates by
+# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+# 'searchbox.html']``.
+#
+# html_sidebars = {}
+
+
+# -- Options for HTMLHelp output ---------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'ApacheIgnitebinaryclientPythonAPIdoc'
+
+
+# -- Options for LaTeX output ------------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'ApacheIgnitebinaryclientPythonAPI.tex', 'Apache Ignite binary client Python API Documentation',
+ 'Apache Software Foundation (ASF)', 'manual'),
+]
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'apacheignitebinaryclientpythonapi', 'Apache Ignite binary client Python API Documentation',
+ [author], 1)
+]
+
+
+# -- Options for Texinfo output ----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'ApacheIgnitebinaryclientPythonAPI', 'Apache Ignite binary client Python API Documentation',
+ author, 'ApacheIgnitebinaryclientPythonAPI', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+
+# -- Extension configuration -------------------------------------------------
+
+# -- Options for todo extension ----------------------------------------------
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+def skip_fn(app, what, name, obj, skip, options):
+ if name == "__init__":
+ return False
+ return skip
+
+
+def setup(app):
+ app.connect("autodoc-skip-member", skip_fn)
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/datatypes/cache_props.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/datatypes/cache_props.rst b/modules/platforms/python/docs/datatypes/cache_props.rst
new file mode 100644
index 0000000..03443b9
--- /dev/null
+++ b/modules/platforms/python/docs/datatypes/cache_props.rst
@@ -0,0 +1,163 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+.. _cache_props:
+
+================
+Cache Properties
+================
+
+The :mod:`~pyignite.datatypes.prop_codes` module contains a list of ordinal
+values, that represent various cache settings.
+
+Please refer to the `Apache Ignite Data Grid`_ documentation on cache
+synchronization, rebalance, affinity and other cache configuration-related
+matters.
+
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| Property | Ordinal | Property | Description |
+| name | value | type | |
++=======================================+==========+==========+=======================================================+
+| Read/write cache properties, used to configure cache via :py:meth:`~pyignite.client.Client.create_cache` or |
+| :py:meth:`~pyignite.client.Client.get_or_create_cache` |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_NAME | 0 | str | Cache name. This is the only *required* property. |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_CACHE_MODE | 1 | int | Cache mode: LOCAL=0, REPLICATED=1, PARTITIONED=2 |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_CACHE_ATOMICITY_MODE | 2 | int | Cache atomicity mode: TRANSACTIONAL=0, ATOMIC=1 |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_BACKUPS_NUMBER | 3 | int | Number of backups |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_WRITE_SYNCHRONIZATION_MODE | 4 | int | Write synchronization mode: FULL_SYNC=0, |
+| | | | FULL_ASYNC=1, PRIMARY_SYNC=2 |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_COPY_ON_READ | 5 | bool | Copy-on-read |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_READ_FROM_BACKUP | 6 | bool | Read from backup |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_DATA_REGION_NAME | 100 | str | Data region name |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_IS_ONHEAP_CACHE_ENABLED | 101 | bool | Is OnHeap cache enabled? |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_QUERY_ENTITIES | 200 | list | A list of query entities (see `Query entity`_) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_QUERY_PARALLELISM | 201 | int | Query parallelism |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_QUERY_DETAIL_METRIC_SIZE | 202 | int | Query detail metric size |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_SQL_SCHEMA | 203 | str | SQL schema |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_SQL_INDEX_INLINE_MAX_SIZE | 204 | int | SQL index inline maximum size |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_SQL_ESCAPE_ALL | 205 | bool | Turns on SQL escapes |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_MAX_QUERY_ITERATORS | 206 | int | Maximum number of query iterators |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_MODE | 300 | int | Rebalance mode: SYNC=0, ASYNC=1, NONE=2 |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_DELAY | 301 | int | Rebalance delay (ms) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_TIMEOUT | 302 | int | Rebalance timeout (ms) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_BATCH_SIZE | 303 | int | Rebalance batch size |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_BATCHES_PREFETCH_COUNT | 304 | int | Rebalance batches prefetch count |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_ORDER | 305 | int | Rebalance order |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_REBALANCE_THROTTLE | 306 | int | Rebalance throttle (ms) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_GROUP_NAME | 400 | str | Group name |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_CACHE_KEY_CONFIGURATION | 401 | list | Cache key configuration (see `Cache key`_) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_DEFAULT_LOCK_TIMEOUT | 402 | int | Default lock timeout (ms) |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_MAX_CONCURRENT_ASYNC_OPERATIONS | 403 | int | Maximum number of concurrent asynchronous operations |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_PARTITION_LOSS_POLICY | 404 | int | Partition loss policy: READ_ONLY_SAFE=0, |
+| | | | READ_ONLY_ALL=1, READ_WRITE_SAFE=2, READ_WRITE_ALL=3, |
+| | | | IGNORE=4 |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_EAGER_TTL | 405 | bool | Eager TTL |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_STATISTICS_ENABLED | 406 | bool | Statistics enabled |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| Read-only cache properties. Can not be set, but only retrieved via :py:meth:`~pyignite.cache.Cache.settings` |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+| PROP_INVALIDATE | -1 | bool | Invalidate |
++---------------------------------------+----------+----------+-------------------------------------------------------+
+
+Query entity
+------------
+
+A dict with all of the following keys:
+
+- `table_name`: SQL table name,
+- `key_field_name`: name of the key field,
+- `key_type_name`: name of the key type (Java type or complex object),
+- `value_field_name`: name of the value field,
+- `value_type_name`: name of the value type,
+- `field_name_aliases`: a list of 0 or more dicts of aliases
+ (see `Field name alias`_),
+- `query_fields`: a list of 0 or more query field names (see `Query field`_),
+- `query_indexes`: a list of 0 or more query indexes (see `Query index`_).
+
+Field name alias
+================
+
+- `field_name`: field name,
+- `alias`: alias (str).
+
+Query field
+===========
+
+- `name`: field name,
+- `type_name`: name of Java type or complex object,
+- `is_key_field`: (optional) boolean value, `False` by default,
+- `is_notnull_constraint_field`: boolean value,
+- `default_value`: (optional) anything that can be converted to `type_name`
+ type. `None` (:py:class:`~pyignite.datatypes.null_object.Null`) by default,
+- `precision`: (optional) decimal precision: total number of digits
+ in decimal value. Defaults to -1 (use cluster default). Ignored for
+ non-decimal SQL types (other than `java.math.BigDecimal`),
+- `scale`: (optional) decimal precision: number of digits after the decimal
+ point. Defaults to -1 (use cluster default). Ignored for non-decimal SQL
+ types.
+
+Query index
+===========
+
+- `index_name`: index name,
+- `index_type`: index type code as an integer value in unsigned byte range,
+- `inline_size`: integer value,
+- `fields`: a list of 0 or more indexed fields (see `Fields`_).
+
+Fields
+======
+
+- `name`: field name,
+- `is_descending`: (optional) boolean value, `False` by default.
+
+Cache key
+---------
+
+A dict of the following format:
+
+- `type_name`: name of the complex object,
+- `affinity_key_field_name`: name of the affinity key field.
+
+.. _Apache Ignite Data Grid: https://apacheignite.readme.io/docs/data-grid
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/datatypes/parsers.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/datatypes/parsers.rst b/modules/platforms/python/docs/datatypes/parsers.rst
new file mode 100644
index 0000000..a717f4c
--- /dev/null
+++ b/modules/platforms/python/docs/datatypes/parsers.rst
@@ -0,0 +1,175 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+.. _data_types:
+
+==========
+Data Types
+==========
+
+Apache Ignite uses a sophisticated system of serializable data types
+to store and retrieve user data, as well as to manage the configuration
+of its caches through the Ignite binary protocol.
+
+The complexity of data types varies from simple integer or character types
+to arrays, maps, collections and structures.
+
+Each data type is defined by its code. `Type code` is byte-sized. Thus,
+every data object can be represented as a payload of fixed or variable size,
+logically divided into one or more fields, prepended by the `type_code` field.
+
+Most Ignite data types can be represented by a standard Python
+data type or class. Some of them, however, are conceptually alien, overly
+complex, or ambiguous to Python's dynamic type system.
+
+The following table summarizes the notion of Apache Ignite data types,
+as well as their representation and handling in Python. For the nice
+description, as well as gory implementation details, you may follow the link
+to the parser/constructor class definition. Note that parser/constructor
+classes are not instantiable. The `class` here is used mostly as a sort of
+tupperware for organizing methods together.
+
+*Note:* you are not obliged to actually use those parser/constructor classes.
+Pythonic types will suffice to interact with Apache Ignite binary API.
+However, in some rare cases of type ambiguity, as well as for the needs
+of interoperability, you may have to sneak one or the other class, along
+with your data, into some API function as a *type conversion hint*.
+
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|`type_code`|Apache Ignite |Python type |Parser/constructor |
+| |docs reference |or class |class |
++===========+====================+===============================+=================================================================+
+|*Primitive data types* |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x01 |Byte_ |int |:class:`~pyignite.datatypes.primitive_objects.ByteObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x02 |Short_ |int |:class:`~pyignite.datatypes.primitive_objects.ShortObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x03 |Int_ |int |:class:`~pyignite.datatypes.primitive_objects.IntObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x04 |Long_ |int |:class:`~pyignite.datatypes.primitive_objects.LongObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x05 |Float_ |float |:class:`~pyignite.datatypes.primitive_objects.FloatObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x06 |Double_ |float |:class:`~pyignite.datatypes.primitive_objects.DoubleObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x07 |Char_ |str |:class:`~pyignite.datatypes.primitive_objects.CharObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x08 |Bool_ |bool |:class:`~pyignite.datatypes.primitive_objects.BoolObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x65 |Null_ |NoneType |:class:`~pyignite.datatypes.null_object.Null` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|*Standard objects* |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x09 |String_ |str |:class:`~pyignite.datatypes.standard.String` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0a |UUID_ |uuid.UUID |:class:`~pyignite.datatypes.standard.UUIDObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x21 |Timestamp_ |tuple |:class:`~pyignite.datatypes.standard.TimestampObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0b |Date_ |datetime.datetime |:class:`~pyignite.datatypes.standard.DateObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x24 |Time_ |datetime.timedelta |:class:`~pyignite.datatypes.standard.TimeObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x1e |Decimal_ |decimal.Decimal |:class:`~pyignite.datatypes.standard.DecimalObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x1c |Enum_ |tuple |:class:`~pyignite.datatypes.standard.EnumObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x67 |`Binary enum`_ |tuple |:class:`~pyignite.datatypes.standard.BinaryEnumObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|*Arrays of primitives* |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0c |`Byte array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.ByteArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0d |`Short array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.ShortArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0e |`Int array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.IntArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x0f |`Long array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.LongArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x10 |`Float array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.FloatArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x11 |`Double array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.DoubleArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x12 |`Char array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.CharArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x13 |`Bool array`_ |iterable/list |:class:`~pyignite.datatypes.primitive_arrays.BoolArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|*Arrays of standard objects* |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x14 |`String array`_ |iterable/list |:class:`~pyignite.datatypes.standard.StringArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x15 |`UUID array`_ |iterable/list |:class:`~pyignite.datatypes.standard.UUIDArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x22 |`Timestamp array`_ |iterable/list |:class:`~pyignite.datatypes.standard.TimestampArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x16 |`Date array`_ |iterable/list |:class:`~pyignite.datatypes.standard.DateArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x23 |`Time array`_ |iterable/list |:class:`~pyignite.datatypes.standard.TimeArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x1f |`Decimal array`_ |iterable/list |:class:`~pyignite.datatypes.standard.DecimalArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|*Object collections, special types, and complex object* |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x17 |`Object array`_ |iterable/list |:class:`~pyignite.datatypes.complex.ObjectArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x18 |`Collection`_ |tuple |:class:`~pyignite.datatypes.complex.CollectionObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x19 |`Map`_ |dict, collections.OrderedDict |:class:`~pyignite.datatypes.complex.MapObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x1d |`Enum array`_ |iterable/list |:class:`~pyignite.datatypes.standard.EnumArrayObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x67 |`Complex object`_ |object |:class:`~pyignite.datatypes.complex.BinaryObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+|0x1b |`Wrapped data`_ |tuple |:class:`~pyignite.datatypes.complex.WrappedDataObject` |
++-----------+--------------------+-------------------------------+-----------------------------------------------------------------+
+
+.. _Byte: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-byte
+.. _Short: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-short
+.. _Int: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-int
+.. _Long: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-long
+.. _Float: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-float
+.. _Double: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-double
+.. _Char: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-char
+.. _Bool: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-bool
+.. _Null: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-null
+.. _String: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-string
+.. _UUID: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-uuid-guid-
+.. _Timestamp: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-timestamp
+.. _Date: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-date
+.. _Time: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-time
+.. _Decimal: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-decimal
+.. _Enum: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-enum
+.. _Byte array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-byte-array
+.. _Short array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-short-array
+.. _Int array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-int-array
+.. _Long array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-long-array
+.. _Float array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-float-array
+.. _Double array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-double-array
+.. _Char array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-char-array
+.. _Bool array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-bool-array
+.. _String array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-string-array
+.. _UUID array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-uuid-guid-array
+.. _Timestamp array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-timestamp-array
+.. _Date array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-date-array
+.. _Time array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-time-array
+.. _Decimal array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-decimal-array
+.. _Object array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-object-collections
+.. _Collection: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-collection
+.. _Map: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-map
+.. _Enum array: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-enum-array
+.. _Binary enum: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-binary-enum
+.. _Wrapped data: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-wrapped-data
+.. _Complex object: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-complex-object
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/examples.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/examples.rst b/modules/platforms/python/docs/examples.rst
new file mode 100644
index 0000000..3d8d2d9
--- /dev/null
+++ b/modules/platforms/python/docs/examples.rst
@@ -0,0 +1,624 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+.. _examples_of_usage:
+
+=================
+Examples of usage
+=================
+File: `get_and_put.py`_.
+
+Key-value
+---------
+
+Open connection
+===============
+
+.. literalinclude:: ../examples/get_and_put.py
+ :language: python
+ :lines: 16-19
+
+.. _create_cache:
+
+Create cache
+============
+
+.. literalinclude:: ../examples/get_and_put.py
+ :language: python
+ :lines: 21
+
+Put value in cache
+==================
+
+.. literalinclude:: ../examples/get_and_put.py
+ :language: python
+ :lines: 23
+
+Get value from cache
+====================
+
+.. literalinclude:: ../examples/get_and_put.py
+ :language: python
+ :lines: 25-29
+
+Get multiple values from cache
+==============================
+
+.. literalinclude:: ../examples/get_and_put.py
+ :language: python
+ :lines: 31-36
+
+Type hints usage
+================
+File: `type_hints.py`_
+
+.. literalinclude:: ../examples/type_hints.py
+ :language: python
+ :lines: 24-48
+
+As a rule of thumb:
+
+- when a `pyignite` method or function deals with a single value or key, it
+ has an additional parameter, like `value_hint` or `key_hint`, which accepts
+ a parser/constructor class,
+
+- nearly any structure element (inside dict or list) can be replaced with
+ a two-tuple of (said element, type hint).
+
+Refer to the :ref:`data_types` section for the full list
+of parser/constructor classes you can use as type hints.
+
+Scan
+====
+File: `scans.py`_.
+
+Cache's :py:meth:`~pyignite.cache.Cache.scan` method allows you
+to get the whole contents of the cache, element by element.
+
+Let us put some data in cache.
+
+.. literalinclude:: ../examples/scans.py
+ :language: python
+ :lines: 23-33
+
+:py:meth:`~pyignite.cache.Cache.scan` returns a generator, that yields
+two-tuples of key and value. You can iterate through the generated pairs
+in a safe manner:
+
+.. literalinclude:: ../examples/scans.py
+ :language: python
+ :lines: 34-41
+
+Or, alternatively, you can convert the generator to dictionary in one go:
+
+.. literalinclude:: ../examples/scans.py
+ :language: python
+ :lines: 44-52
+
+But be cautious: if the cache contains a large set of data, the dictionary
+may eat too much memory!
+
+Do cleanup
+==========
+
+Destroy created cache and close connection.
+
+.. literalinclude:: ../examples/scans.py
+ :language: python
+ :lines: 54-55
+
+.. _sql_examples:
+
+SQL
+---
+File: `sql.py`_.
+
+These examples are similar to the ones given in the Apache Ignite SQL
+Documentation: `Getting Started`_.
+
+Setup
+=====
+
+First let us establish a connection.
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 195-196
+
+Then create tables. Begin with the `Country` table, then proceed with the
+related tables `City` and `CountryLanguage`.
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 25-42, 51-59, 67-74, 199-204
+
+Create indexes.
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 60-62, 75-77, 207-208
+
+Fill tables with data.
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 43-50, 63-66, 78-81, 211-218
+
+Data samples are taken from `Ignite GitHub repository`_.
+
+That concludes the preparation of data. Now let us answer some questions.
+
+What are the 10 largest cities in our data sample (population-wise)?
+====================================================================
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 24, 221-238
+
+The :py:meth:`~pyignite.client.Client.sql` method returns a generator,
+that yields the resulting rows.
+
+What are the 10 most populated cities throughout the 3 chosen countries?
+========================================================================
+
+If you set the `include_field_names` argument to `True`, the
+:py:meth:`~pyignite.client.Client.sql` method will generate a list of
+column names as a first yield. You can access field names with Python built-in
+`next` function.
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 241-269
+
+Display all the information about a given city
+==============================================
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 272-290
+
+Finally, delete the tables used in this example with the following queries:
+
+.. literalinclude:: ../examples/sql.py
+ :language: python
+ :lines: 82-83, 293-298
+
+.. _complex_object_usage:
+
+Complex objects
+---------------
+File: `binary_basics.py`_.
+
+`Complex object`_ (that is often called ‘Binary object’) is an Ignite data
+type, that is designed to represent a Java class. It has the following
+features:
+
+- it has a unique ID (type id), which is derived from a class name (type name),
+- it has one or more associated schemas, that describe its inner structure (the
+  order, names and types of its fields). Each schema has its own ID,
+- it has an optional version number, that is aimed towards the end users
+  to help them distinguish between objects of the same type, serialized
+  with different schemas.
+
+Unfortunately, these distinctive features of the Complex object have little to
+no meaning outside of the Java language. A Python class can not be defined by
+its name (it is not unique), ID (object ID in Python is volatile; in CPython it
+is just a pointer in the interpreter's memory heap), or the set of its fields
+(they do not have associated data types; moreover, they can be added or deleted
+at run-time). For the `pyignite` user it means that for all purposes
+of storing native Python data it is better to use Ignite
+:class:`~pyignite.datatypes.complex.CollectionObject`
+or :class:`~pyignite.datatypes.complex.MapObject` data types.
+
+However, for interoperability purposes, `pyignite` has a mechanism of creating
+special Python classes to read or write Complex objects. These classes have
+an interface, that simulates all the features of the Complex object: type name,
+type ID, schema, schema ID, and version number.
+
+Assuming that one concrete class for representing one Complex object can
+severely limit the user's data manipulation capabilities, all the
+functionality said above is implemented through the metaclass:
+:class:`~pyignite.binary.GenericObjectMeta`. This metaclass is used
+automatically when reading Complex objects.
+
+.. literalinclude:: ../examples/binary_basics.py
+ :language: python
+ :lines: 18-20, 30-34, 39-42, 48-49
+
+Here you can see how :class:`~pyignite.binary.GenericObjectMeta` uses
+`attrs`_ package internally for creating nice `__init__()` and `__repr__()`
+methods.
+
+You can reuse the autogenerated class for subsequent writes:
+
+.. literalinclude:: ../examples/binary_basics.py
+ :language: python
+ :lines: 53, 34-37
+
+:class:`~pyignite.binary.GenericObjectMeta` can also be used directly
+for creating custom classes:
+
+.. literalinclude:: ../examples/binary_basics.py
+ :language: python
+ :lines: 22-27
+
+Note how the `Person` class is defined. `schema` is a
+:class:`~pyignite.binary.GenericObjectMeta` metaclass parameter.
+Another important :class:`~pyignite.binary.GenericObjectMeta` parameter
+is a `type_name`, but it is optional and defaults to the class name (‘Person’
+in our example).
+
+Note also, that `Person` does not have to define its own attributes, methods
+and properties (`pass`), although it is completely possible.
+
+Now, when your custom `Person` class is created, you are ready to send data
+to Ignite server using its objects. The client will implicitly register your
+class as soon as the first Complex object is sent. If you intend to use your
+custom class for reading existing Complex objects' values before all, you must
+register said class explicitly with your client:
+
+.. literalinclude:: ../examples/binary_basics.py
+ :language: python
+ :lines: 51
+
+Now, when we dealt with the basics of `pyignite` implementation of Complex
+Objects, let us move on to more elaborate examples.
+
+.. _sql_cache_read:
+
+Read
+====
+File: `read_binary.py`_.
+
+Ignite SQL uses Complex objects internally to represent keys and rows
+in SQL tables. Normally SQL data is accessed via queries (see `SQL`_),
+so we will consider the following example solely for the demonstration
+of how Binary objects (not Ignite SQL) work.
+
+In the :ref:`previous examples <sql_examples>` we have created some SQL tables.
+Let us do it again and examine the Ignite storage afterwards.
+
+.. literalinclude:: ../examples/read_binary.py
+ :language: python
+ :lines: 222-229
+
+We can see that Ignite created a cache for each of our tables. The caches are
+conveniently named using ‘`SQL_<schema name>_<table name>`’ pattern.
+
+Now let us examine a configuration of a cache that contains SQL data
+using a :py:attr:`~pyignite.cache.Cache.settings` property.
+
+.. literalinclude:: ../examples/read_binary.py
+ :language: python
+ :lines: 231-251
+
+The values of `value_type_name` and `key_type_name` are names of the binary
+types. The `City` table's key fields are stored using `key_type_name` type,
+and the other fields − `value_type_name` type.
+
+Now when we have the cache, in which the SQL data resides, and the names
+of the key and value data types, we can read the data without using SQL
+functions and verify the correctness of the result.
+
+.. literalinclude:: ../examples/read_binary.py
+ :language: python
+ :lines: 253-267
+
+What we see is a tuple of key and value, extracted from the cache. Both key
+and value represent Complex objects. The dataclass names are the same
+as the `value_type_name` and `key_type_name` cache settings. The objects'
+fields correspond to the SQL query.
+
+.. _sql_cache_create:
+
+Create
+======
+File: `create_binary.py`_.
+
+Now that we are aware of the internal structure of the Ignite SQL storage,
+we can create a table and put data in it using only key-value functions.
+
+For example, let us create a table to register High School students:
+a rough equivalent of the following SQL DDL statement:
+
+::
+
+ CREATE TABLE Student (
+ sid CHAR(9),
+ name VARCHAR(20),
+ login CHAR(8),
+ age INTEGER(11),
+ gpa REAL
+ )
+
+These are the necessary steps to perform the task.
+
+1. Create table cache.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 22-63
+
+2. Define Complex object data class.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 66-76
+
+3. Insert row.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 79-83
+
+Now let us make sure that our cache really can be used with SQL functions.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 85-93
+
+Note, however, that the cache we create can not be dropped with DDL command.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 95-100
+
+It should be deleted as any other key-value cache.
+
+.. literalinclude:: ../examples/create_binary.py
+ :language: python
+ :lines: 102
+
+Migrate
+=======
+File: `migrate_binary.py`_.
+
+Suppose we have an accounting app that stores its data in key-value format.
+Our task would be to introduce the following changes to the original expense
+voucher's format and data:
+
+- rename `date` to `expense_date`,
+- add `report_date`,
+- set `report_date` to the current date if `reported` is True, None if False,
+- delete `reported`.
+
+First get the vouchers' cache.
+
+.. literalinclude:: ../examples/migrate_binary.py
+ :language: python
+ :lines: 108-111
+
+If you do not store the schema of the Complex object in code, you can obtain
+it as a dataclass property with
+:py:meth:`~pyignite.client.Client.query_binary_type` method.
+
+.. literalinclude:: ../examples/migrate_binary.py
+ :language: python
+ :lines: 116-123
+
+Let us modify the schema and create a new Complex object class with an updated
+schema.
+
+.. literalinclude:: ../examples/migrate_binary.py
+ :language: python
+ :lines: 125-138
+
+Now migrate the data from the old schema to the new one.
+
+.. literalinclude:: ../examples/migrate_binary.py
+ :language: python
+ :lines: 141-190
+
+At this moment all the fields, defined in both of our schemas, can be
+available in the resulting binary object, depending on which schema was used
+when writing it using :py:meth:`~pyignite.cache.Cache.put` or similar methods.
+The Ignite Binary API does not have a method to delete a Complex object schema;
+all the schemas ever defined will stay in the cluster until its shutdown.
+
+This versioning mechanism is quite simple and robust, but it has its
+limitations. The main thing is: you can not change the type of the existing
+field. If you try, you will be greeted with the following message:
+
+```org.apache.ignite.binary.BinaryObjectException: Wrong value has been set
+[typeName=SomeType, fieldName=f1, fieldType=String, assignedValueType=int]```
+
+As an alternative, you can rename the field or create a new Complex object.
+
+Failover
+--------
+File: `failover.py`_.
+
+When connection to the server is broken or timed out,
+:class:`~pyignite.client.Client` object propagates an original exception
+(`OSError` or `SocketError`), but keeps its constructor's parameters intact
+and tries to reconnect transparently.
+
+When there's no way for :class:`~pyignite.client.Client` to reconnect, it
+raises a special :class:`~pyignite.exceptions.ReconnectError` exception.
+
+The following example features a simple node list traversal failover mechanism.
+Gather 3 Ignite nodes on `localhost` into one cluster and run:
+
+.. literalinclude:: ../examples/failover.py
+ :language: python
+ :lines: 16-49
+
+Then try shutting down and restarting nodes, and see what happens.
+
+.. literalinclude:: ../examples/failover.py
+ :language: python
+ :lines: 51-61
+
+Client reconnection does not require an explicit user action, like calling
+a special method or resetting a parameter. Note, however, that reconnection
+is lazy: it happens only if (and when) it is needed. In this example,
+the automatic reconnection happens, when the script checks upon the last
+saved value:
+
+.. literalinclude:: ../examples/failover.py
+ :language: python
+ :lines: 48
+
+It means that instead of checking the connection status it is better for
+`pyignite` user to just try the supposed data operations and catch
+the resulting exception.
+
+:py:meth:`~pyignite.connection.Connection.connect` method accepts any
+iterable, not just list. It means that you can implement any reconnection
+policy (round-robin, nodes prioritization, pause on reconnect or graceful
+backoff) with a generator.
+
+`pyignite` comes with a sample
+:class:`~pyignite.connection.generators.RoundRobin` generator. In the above
+example try to replace
+
+.. literalinclude:: ../examples/failover.py
+ :language: python
+ :lines: 29
+
+with
+
+.. code-block:: python3
+
+ client.connect(RoundRobin(nodes, max_reconnects=20))
+
+The client will try to reconnect to node 1 after node 3 is crashed, then to
+node 2, etc. At least one node should be active for the
+:class:`~pyignite.connection.generators.RoundRobin` to work properly.
+
+SSL/TLS
+-------
+
+There are some special requirements for testing SSL connectivity.
+
+The Ignite server must be configured for securing the binary protocol port.
+The server configuration process can be split up into these basic steps:
+
+1. Create a key store and a trust store using `Java keytool`_. When creating
+ the trust store, you will probably need a client X.509 certificate. You
+ will also need to export the server X.509 certificate to include in the
+ client chain of trust.
+
+2. Turn on the `SslContextFactory` for your Ignite cluster according to this
+ document: `Securing Connection Between Nodes`_.
+
+3. Tell Ignite to encrypt data on its thin client port, using the settings for
+ `ClientConnectorConfiguration`_. If you only want to encrypt connection,
+ not to validate client's certificate, set `sslClientAuth` property to
+ `false`. You'll still have to set up the trust store on step 1 though.
+
+Client SSL settings are summarized here:
+:class:`~pyignite.client.Client`.
+
+To use the SSL encryption without certificate validation, just set `use_ssl`
+to `True`.
+
+.. code-block:: python3
+
+ from pyignite import Client
+
+ client = Client(use_ssl=True)
+ client.connect('127.0.0.1', 10800)
+
+To identify the client, create an SSL keypair and a certificate with
+`openssl`_ command and use them in this manner:
+
+.. code-block:: python3
+
+ from pyignite import Client
+
+ client = Client(
+ use_ssl=True,
+ ssl_keyfile='etc/.ssl/keyfile.key',
+ ssl_certfile='etc/.ssl/certfile.crt',
+ )
+ client.connect('ignite-example.com', 10800)
+
+To check the authenticity of the server, get the server certificate or
+certificate chain and provide its path in the `ssl_ca_certfile` parameter.
+
+.. code-block:: python3
+
+ import ssl
+
+ from pyignite import Client
+
+ client = Client(
+ use_ssl=True,
+ ssl_ca_certfile='etc/.ssl/ca_certs',
+ ssl_cert_reqs=ssl.CERT_REQUIRED,
+ )
+ client.connect('ignite-example.com', 10800)
+
+You can also provide such parameters as the set of ciphers (`ssl_ciphers`) and
+the SSL version (`ssl_version`), if the defaults
+(:py:obj:`ssl._DEFAULT_CIPHERS` and TLS 1.1) do not suit you.
+
+Password authentication
+-----------------------
+
+To authenticate you must set the `authenticationEnabled` property to `true`
+and enable persistence in the Ignite XML configuration file, as described in
+the `Authentication`_ section of the Ignite documentation.
+
+Be advised that sending credentials over the open channel is greatly
+discouraged, since they can be easily intercepted. Supplying credentials
+automatically turns SSL on from the client side. It is highly recommended
+to secure the connection to the Ignite server, as described
+in `SSL/TLS`_ example, in order to use password authentication.
+
+Then just supply `username` and `password` parameters to
+:class:`~pyignite.client.Client` constructor.
+
+.. code-block:: python3
+
+ from pyignite import Client
+
+ client = Client(username='ignite', password='ignite')
+ client.connect('ignite-example.com', 10800)
+
+If you still do not wish to secure the connection in spite of the warning,
+then disable SSL explicitly on creating the client object:
+
+.. code-block:: python3
+
+ client = Client(username='ignite', password='ignite', use_ssl=False)
+
+Note, that it is not possible for Ignite thin client to obtain the cluster's
+authentication settings through the binary protocol. Unexpected credentials
+are simply ignored by the server. In the opposite case, the user is greeted
+with the following message:
+
+.. code-block:: python3
+
+ # pyignite.exceptions.HandshakeError: Handshake error: Unauthenticated sessions are prohibited.
+
+.. _get_and_put.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/get_and_put.py
+.. _type_hints.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/type_hints.py
+.. _failover.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/failover.py
+.. _scans.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/scans.py
+.. _sql.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/sql.py
+.. _binary_basics.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/binary_basics.py
+.. _read_binary.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/read_binary.py
+.. _create_binary.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/create_binary.py
+.. _migrate_binary.py: https://github.com/apache/ignite/tree/master/modules/platforms/python/examples/migrate_binary.py
+.. _Getting Started: https://apacheignite-sql.readme.io/docs/getting-started
+.. _Ignite GitHub repository: https://github.com/apache/ignite/blob/master/examples/sql/world.sql
+.. _Complex object: https://apacheignite.readme.io/docs/binary-client-protocol-data-format#section-complex-object
+.. _Java keytool: https://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html
+.. _Securing Connection Between Nodes: https://apacheignite.readme.io/docs/ssltls#section-securing-connection-between-nodes
+.. _ClientConnectorConfiguration: https://ignite.apache.org/releases/latest/javadoc/org/apache/ignite/configuration/ClientConnectorConfiguration.html
+.. _openssl: https://www.openssl.org/docs/manmaster/man1/openssl.html
+.. _Authentication: https://apacheignite.readme.io/docs/advanced-security#section-authentication
+.. _attrs: https://pypi.org/project/attrs/
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/index.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/index.rst b/modules/platforms/python/docs/index.rst
new file mode 100644
index 0000000..35bd18c
--- /dev/null
+++ b/modules/platforms/python/docs/index.rst
@@ -0,0 +1,33 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+Welcome to Apache Ignite binary client Python API documentation!
+================================================================
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ readme
+ modules
+ examples
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
http://git-wip-us.apache.org/repos/asf/ignite/blob/7e547b13/modules/platforms/python/docs/modules.rst
----------------------------------------------------------------------
diff --git a/modules/platforms/python/docs/modules.rst b/modules/platforms/python/docs/modules.rst
new file mode 100644
index 0000000..cabc915
--- /dev/null
+++ b/modules/platforms/python/docs/modules.rst
@@ -0,0 +1,31 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+=================
+API Specification
+=================
+
+The modules and subpackages listed here are the basis of a stable API
+of `pyignite`, intended for end users.
+
+.. toctree::
+ :maxdepth: 1
+ :caption: Modules:
+
+ Client <source/pyignite.client>
+ Cache <source/pyignite.cache>
+ datatypes/parsers
+ datatypes/cache_props
+ Exceptions <source/pyignite.exceptions>