Posted to commits@ignite.apache.org by ag...@apache.org on 2015/11/24 12:42:20 UTC

[01/25] ignite git commit: IGNITE-1964 .NET: Enum type ID is written if it is registered as portable type.

Repository: ignite
Updated Branches:
  refs/heads/ignite-1282 b75184304 -> 76803923d


IGNITE-1964 .NET: Enum type ID is written if it is registered as portable type.
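
In short: WriteEnum previously always emitted the generic object type id; with this change the writer looks up the enum's descriptor in the marshaller and, if the type is registered, writes its real type id, so the Java side can resolve enums written by .NET (see the new TestEchoTaskEnumField test). A minimal sketch of the registered-enum round trip, mirroring the BinarySelfTest cases added below:

    // Register the enum type so the writer emits its type id
    // instead of the generic object type id.
    var marsh = new Marshaller(new BinaryConfiguration
    {
        TypeConfigurations = new[] { new BinaryTypeConfiguration(typeof(TestEnum)) }
    });

    TestEnum val = TestEnum.Val1;

    // Round trip: unmarshalling the marshalled value yields the same enum.
    Assert.AreEqual(val, marsh.Unmarshal<TestEnum>(marsh.Marshal(val)));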


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/19d2dd05
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/19d2dd05
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/19d2dd05

Branch: refs/heads/ignite-1282
Commit: 19d2dd0571ba1e92593da50b8b35b35e2fb7b6e7
Parents: 171bbee
Author: Pavel Tupitsyn <pt...@gridgain.com>
Authored: Mon Nov 23 12:08:26 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Mon Nov 23 12:08:26 2015 +0300

----------------------------------------------------------------------
 .../platform/PlatformComputeEchoTask.java       | 18 +++++++
 .../Binary/BinarySelfTest.cs                    | 35 ++++++++++++++
 .../Compute/ComputeApiTest.cs                   | 51 +++++++++++++++-----
 .../Impl/Binary/BinarySystemHandlers.cs         |  2 +-
 .../Impl/Binary/BinaryUtils.cs                  | 36 ++++++++++----
 .../Impl/Binary/BinaryWriter.cs                 | 32 +++++++-----
 6 files changed, 140 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java b/modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java
index fe4e01c..c464945 100644
--- a/modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java
+++ b/modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeEchoTask.java
@@ -17,13 +17,17 @@
 
 package org.apache.ignite.platform;
 
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
 import org.apache.ignite.IgniteException;
+import org.apache.ignite.binary.BinaryObject;
 import org.apache.ignite.cluster.ClusterNode;
 import org.apache.ignite.compute.ComputeJob;
 import org.apache.ignite.compute.ComputeJobAdapter;
 import org.apache.ignite.compute.ComputeJobResult;
 import org.apache.ignite.compute.ComputeTaskAdapter;
 import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.resources.IgniteInstanceResource;
 import org.jetbrains.annotations.Nullable;
 
 import java.util.Collections;
@@ -88,6 +92,9 @@ public class PlatformComputeEchoTask extends ComputeTaskAdapter<Integer, Object>
     /** Type: enum array. */
     private static final int TYPE_ENUM_ARRAY = 17;
 
+    /** Type: enum field. */
+    private static final int TYPE_ENUM_FIELD = 18;
+
     /** {@inheritDoc} */
     @Nullable @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid,
         @Nullable Integer arg) {
@@ -106,6 +113,10 @@ public class PlatformComputeEchoTask extends ComputeTaskAdapter<Integer, Object>
         /** Type. */
         private Integer type;
 
+        /** Ignite. */
+        @IgniteInstanceResource
+        private Ignite ignite;
+
         /**
          * Constructor.
          *
@@ -180,6 +191,13 @@ public class PlatformComputeEchoTask extends ComputeTaskAdapter<Integer, Object>
                         PlatformComputeEnum.FOO
                     };
 
+                case TYPE_ENUM_FIELD:
+                    IgniteCache<Integer, BinaryObject> cache = ignite.cache(null).withKeepBinary();
+                    BinaryObject obj = cache.get(TYPE_ENUM_FIELD);
+                    PlatformComputeEnum val = obj.field("interopEnum");
+
+                    return val;
+
                 default:
                     throw new IgniteException("Unknown type: " + type);
             }

http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Binary/BinarySelfTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Binary/BinarySelfTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Binary/BinarySelfTest.cs
index 102afd1..f7455be 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Binary/BinarySelfTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Binary/BinarySelfTest.cs
@@ -486,6 +486,23 @@ namespace Apache.Ignite.Core.Tests.Binary
             Assert.AreEqual(_marsh.Unmarshal<TestEnum>(_marsh.Marshal(val)), val);
         }
 
+        /// <summary>
+        /// Tests the write of registered enum.
+        /// </summary>
+        [Test]
+        public void TestWriteEnumRegistered()
+        {
+            var marsh =
+                new Marshaller(new BinaryConfiguration
+                {
+                    TypeConfigurations = new[] { new BinaryTypeConfiguration(typeof(TestEnum)) }
+                });
+
+            TestEnum val = TestEnum.Val1;
+
+            Assert.AreEqual(marsh.Unmarshal<TestEnum>(marsh.Marshal(val)), val);
+        }
+
         /**
         * <summary>Check write of enum.</summary>
         */
@@ -497,6 +514,24 @@ namespace Apache.Ignite.Core.Tests.Binary
 
             Assert.AreEqual(vals, newVals);
         }
+
+        /// <summary>
+        /// Tests the write of registered enum array.
+        /// </summary>
+        [Test]
+        public void TestWriteEnumArrayRegistered()
+        {
+            var marsh =
+                new Marshaller(new BinaryConfiguration
+                {
+                    TypeConfigurations = new[] { new BinaryTypeConfiguration(typeof(TestEnum)) }
+                });
+
+            TestEnum[] vals = { TestEnum.Val2, TestEnum.Val3 };
+            TestEnum[] newVals = marsh.Unmarshal<TestEnum[]>(marsh.Marshal(vals));
+
+            Assert.AreEqual(vals, newVals);
+        }
         
         /// <summary>
         /// Test object with dates.

http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Compute/ComputeApiTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Compute/ComputeApiTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Compute/ComputeApiTest.cs
index 68616ab..1e999e3 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Compute/ComputeApiTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Compute/ComputeApiTest.cs
@@ -105,6 +105,9 @@ namespace Apache.Ignite.Core.Tests.Compute
         /** Type: enum array. */
         private const int EchoTypeEnumArray = 17;
 
+        /** Type: enum field. */
+        private const int EchoTypeEnumField = 18;
+
         /** First node. */
         private IIgnite _grid1;
 
@@ -861,9 +864,9 @@ namespace Apache.Ignite.Core.Tests.Compute
         [Test]
         public void TestEchoTaskEnum()
         {
-            var res = _grid1.GetCompute().ExecuteJavaTask<InteropComputeEnum>(EchoTask, EchoTypeEnum);
+            var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum>(EchoTask, EchoTypeEnum);
 
-            Assert.AreEqual(InteropComputeEnum.Bar, res);
+            Assert.AreEqual(PlatformComputeEnum.Bar, res);
         }
 
         /// <summary>
@@ -872,17 +875,34 @@ namespace Apache.Ignite.Core.Tests.Compute
         [Test]
         public void TestEchoTaskEnumArray()
         {
-            var res = _grid1.GetCompute().ExecuteJavaTask<InteropComputeEnum[]>(EchoTask, EchoTypeEnumArray);
+            var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum[]>(EchoTask, EchoTypeEnumArray);
 
             Assert.AreEqual(new[]
             {
-                InteropComputeEnum.Bar,
-                InteropComputeEnum.Baz,
-                InteropComputeEnum.Foo
+                PlatformComputeEnum.Bar,
+                PlatformComputeEnum.Baz,
+                PlatformComputeEnum.Foo
             }, res);
         }
 
         /// <summary>
+        /// Tests the echo task reading enum from a binary object field.
+        /// Ensures that Java can understand enums written by .NET.
+        /// </summary>
+        [Test]
+        public void TestEchoTaskEnumField()
+        {
+            var enumVal = PlatformComputeEnum.Baz;
+
+            _grid1.GetCache<int, InteropComputeEnumFieldTest>(null)
+                .Put(EchoTypeEnumField, new InteropComputeEnumFieldTest {InteropEnum = enumVal});
+
+            var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum>(EchoTask, EchoTypeEnumField);
+
+            Assert.AreEqual(enumVal, res);
+        }
+
+        /// <summary>
         /// Test for binary argument in Java.
         /// </summary>
         [Test]
@@ -1107,11 +1127,15 @@ namespace Apache.Ignite.Core.Tests.Compute
 
             BinaryConfiguration portCfg = new BinaryConfiguration();
 
-            ICollection<BinaryTypeConfiguration> portTypeCfgs = new List<BinaryTypeConfiguration>();
+            var portTypeCfgs = new List<BinaryTypeConfiguration>
+            {
+                new BinaryTypeConfiguration(typeof (PlatformComputeBinarizable)),
+                new BinaryTypeConfiguration(typeof (PlatformComputeNetBinarizable)),
+                new BinaryTypeConfiguration(JavaBinaryCls),
+                new BinaryTypeConfiguration(typeof(PlatformComputeEnum)),
+                new BinaryTypeConfiguration(typeof(InteropComputeEnumFieldTest))
+            };
 
-            portTypeCfgs.Add(new BinaryTypeConfiguration(typeof(PlatformComputeBinarizable)));
-            portTypeCfgs.Add(new BinaryTypeConfiguration(typeof(PlatformComputeNetBinarizable)));
-            portTypeCfgs.Add(new BinaryTypeConfiguration(JavaBinaryCls));
 
             portCfg.TypeConfigurations = portTypeCfgs;
 
@@ -1294,10 +1318,15 @@ namespace Apache.Ignite.Core.Tests.Compute
         }
     }
 
-    public enum InteropComputeEnum
+    public enum PlatformComputeEnum
     {
         Foo,
         Bar,
         Baz
     }
+
+    public class InteropComputeEnumFieldTest
+    {
+        public PlatformComputeEnum InteropEnum { get; set; }
+    }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinarySystemHandlers.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinarySystemHandlers.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinarySystemHandlers.cs
index 2c10d6a..b49c29d 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinarySystemHandlers.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinarySystemHandlers.cs
@@ -631,7 +631,7 @@ namespace Apache.Ignite.Core.Impl.Binary
         {
             ctx.Stream.WriteByte(BinaryUtils.TypeEnum);
 
-            BinaryUtils.WriteEnum(ctx.Stream, (Enum)obj);
+            BinaryUtils.WriteEnum(ctx, obj);
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs
index a387066..1aed03f 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs
@@ -1001,11 +1001,14 @@ namespace Apache.Ignite.Core.Impl.Binary
         /// </summary>
         /// <param name="val">Array.</param>
         /// <param name="ctx">Write context.</param>
-        public static void WriteArray(Array val, BinaryWriter ctx)
+        /// <param name="elementType">Type of the array element.</param>
+        public static void WriteArray(Array val, BinaryWriter ctx, int elementType = ObjTypeId)
         {
+            Debug.Assert(val != null && ctx != null);
+
             IBinaryStream stream = ctx.Stream;
 
-            stream.WriteInt(ObjTypeId);
+            stream.WriteInt(elementType);
 
             stream.WriteInt(val.Length);
 
@@ -1291,18 +1294,31 @@ namespace Apache.Ignite.Core.Impl.Binary
         /// <summary>
         /// Write enum.
         /// </summary>
-        /// <param name="stream">Stream.</param>
+        /// <param name="writer">Writer.</param>
         /// <param name="val">Value.</param>
-        public static void WriteEnum(IBinaryStream stream, Enum val)
+        public static void WriteEnum<T>(BinaryWriter writer, T val)
+        {
+            writer.WriteInt(GetEnumTypeId(val.GetType(), writer.Marshaller));
+            writer.WriteInt(TypeCaster<int>.Cast(val));
+        }
+
+        /// <summary>
+        /// Gets the enum type identifier.
+        /// </summary>
+        /// <param name="enumType">The enum type.</param>
+        /// <param name="marshaller">The marshaller.</param>
+        /// <returns>Enum type id.</returns>
+        public static int GetEnumTypeId(Type enumType, Marshaller marshaller)
         {
-            if (Enum.GetUnderlyingType(val.GetType()) == TypInt)
+            if (Enum.GetUnderlyingType(enumType) == TypInt)
             {
-                stream.WriteInt(ObjTypeId);
-                stream.WriteInt((int) (object) val);
+                var desc = marshaller.GetDescriptor(enumType);
+
+                return desc == null ? ObjTypeId : desc.TypeId;
             }
-            else
-                throw new BinaryObjectException("Only Int32 underlying type is supported for enums: " +
-                    val.GetType().Name);
+
+            throw new BinaryObjectException("Only Int32 underlying type is supported for enums: " +
+                enumType.Name);
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/ignite/blob/19d2dd05/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs
index e09a7f4..c00dad6 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs
@@ -25,6 +25,7 @@ namespace Apache.Ignite.Core.Impl.Binary
     using Apache.Ignite.Core.Impl.Binary.IO;
     using Apache.Ignite.Core.Impl.Binary.Metadata;
     using Apache.Ignite.Core.Impl.Binary.Structure;
+    using Apache.Ignite.Core.Impl.Common;
 
     /// <summary>
     /// Binary writer implementation.
@@ -792,8 +793,7 @@ namespace Apache.Ignite.Core.Impl.Binary
         {
             WriteFieldId(fieldName, BinaryUtils.TypeEnum);
 
-            _stream.WriteByte(BinaryUtils.TypeEnum);
-            BinaryUtils.WriteEnum(_stream, (Enum)(object)val);
+            WriteEnum(val);
         }
 
         /// <summary>
@@ -804,7 +804,7 @@ namespace Apache.Ignite.Core.Impl.Binary
         public void WriteEnum<T>(T val)
         {
             _stream.WriteByte(BinaryUtils.TypeEnum);
-            BinaryUtils.WriteEnum(_stream, (Enum)(object)val);
+            BinaryUtils.WriteEnum(this, val);
         }
 
         /// <summary>
@@ -820,10 +820,7 @@ namespace Apache.Ignite.Core.Impl.Binary
             if (val == null)
                 WriteNullField();
             else
-            {
-                _stream.WriteByte(BinaryUtils.TypeArrayEnum);
-                BinaryUtils.WriteArray(val, this);
-            }
+                WriteEnumArray0(val);
         }
 
         /// <summary>
@@ -836,10 +833,21 @@ namespace Apache.Ignite.Core.Impl.Binary
             if (val == null)
                 WriteNullRawField();
             else
-            {
-                _stream.WriteByte(BinaryUtils.TypeArrayEnum);
-                BinaryUtils.WriteArray(val, this);
-            }
+                WriteEnumArray0(val);
+        }
+
+        /// <summary>
+        /// Writes the enum array.
+        /// </summary>
+        /// <param name="val">The value.</param>
+        private void WriteEnumArray0<T>(T[] val)
+        {
+            _stream.WriteByte(BinaryUtils.TypeArrayEnum);
+
+            // typeof(T) can yield wrong results (string[] is object[], for example)
+            var elementType = val.GetType().GetElementType();  
+
+            BinaryUtils.WriteArray(val, this, BinaryUtils.GetEnumTypeId(elementType, Marshaller));
         }
 
         /// <summary>
@@ -1048,7 +1056,7 @@ namespace Apache.Ignite.Core.Impl.Binary
                 return;
 
             // Suppose that we faced normal object and perform descriptor lookup.
-            IBinaryTypeDescriptor desc = _marsh.GetDescriptor(type);
+            IBinaryTypeDescriptor desc = type.IsEnum ? null : _marsh.GetDescriptor(type);
 
             if (desc != null)
             {


[02/25] ignite git commit: IGNITE-1753 Refactored usages of deprecated CacheTypeMetadata to JdbcType.

Posted by ag...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/test/java/org/apache/ignite/schema/test/model/ignite-type-metadata.xml
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/test/java/org/apache/ignite/schema/test/model/ignite-type-metadata.xml b/modules/schema-import/src/test/java/org/apache/ignite/schema/test/model/ignite-type-metadata.xml
index 2de9c62..f03f24f 100644
--- a/modules/schema-import/src/test/java/org/apache/ignite/schema/test/model/ignite-type-metadata.xml
+++ b/modules/schema-import/src/test/java/org/apache/ignite/schema/test/model/ignite-type-metadata.xml
@@ -18,7 +18,7 @@
 -->
 
 <!--
-    XML generated by Apache Ignite Schema Import utility: 02/05/2015
+    XML generated by Apache Ignite Schema Import utility: 11/20/2015
 -->
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:util="http://www.springframework.org/schema/util"
@@ -27,149 +27,301 @@
                            http://www.springframework.org/schema/beans/spring-beans.xsd
                            http://www.springframework.org/schema/util
                            http://www.springframework.org/schema/util/spring-util.xsd">
-    <bean class="org.apache.ignite.cache.CacheTypeMetadata">
-        <property name="databaseSchema" value="PUBLIC"/>
-        <property name="databaseTable" value="OBJECTS"/>
-        <property name="keyType" value="org.apache.ignite.schema.test.model.ObjectsKey"/>
-        <property name="valueType" value="org.apache.ignite.schema.test.model.Objects"/>
-        <property name="keyFields">
+    <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
+        <property name="types">
             <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="PK"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="pk"/>
-                    <property name="javaType" value="int"/>
-                </bean>
-            </list>
-        </property>
-        <property name="valueFields">
-            <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="PK"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="pk"/>
-                    <property name="javaType" value="int"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BOOLCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.BOOLEAN"/>
-                    </property>
-                    <property name="javaName" value="boolcol"/>
-                    <property name="javaType" value="java.lang.Boolean"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BYTECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TINYINT"/>
-                    </property>
-                    <property name="javaName" value="bytecol"/>
-                    <property name="javaType" value="java.lang.Byte"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="SHORTCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.SMALLINT"/>
-                    </property>
-                    <property name="javaName" value="shortcol"/>
-                    <property name="javaType" value="java.lang.Short"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="INTCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="intcol"/>
-                    <property name="javaType" value="java.lang.Integer"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="LONGCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.BIGINT"/>
-                    </property>
-                    <property name="javaName" value="longcol"/>
-                    <property name="javaType" value="java.lang.Long"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="FLOATCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.REAL"/>
-                    </property>
-                    <property name="javaName" value="floatcol"/>
-                    <property name="javaType" value="java.lang.Float"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DOUBLECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DOUBLE"/>
+                <bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
+                    <property name="databaseSchema" value="PUBLIC"/>
+                    <property name="databaseTable" value="OBJECTS"/>
+                    <property name="keyType" value="org.apache.ignite.schema.test.model.ObjectsKey"/>
+                    <property name="valueType" value="org.apache.ignite.schema.test.model.Objects"/>
+                    <property name="keyFields">
+                        <list>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="PK"/>
+                                <property name="javaFieldType" value="int"/>
+                                <property name="javaFieldName" value="pk"/>
+                            </bean>
+                        </list>
+                    </property>
+                    <property name="valueFields">
+                        <list>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="PK"/>
+                                <property name="javaFieldType" value="int"/>
+                                <property name="javaFieldName" value="pk"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.BOOLEAN"/>
+                                </property>
+                                <property name="databaseFieldName" value="BOOLCOL"/>
+                                <property name="javaFieldType" value="java.lang.Boolean"/>
+                                <property name="javaFieldName" value="boolcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TINYINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="BYTECOL"/>
+                                <property name="javaFieldType" value="java.lang.Byte"/>
+                                <property name="javaFieldName" value="bytecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.SMALLINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="SHORTCOL"/>
+                                <property name="javaFieldType" value="java.lang.Short"/>
+                                <property name="javaFieldName" value="shortcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="INTCOL"/>
+                                <property name="javaFieldType" value="java.lang.Integer"/>
+                                <property name="javaFieldName" value="intcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.BIGINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="LONGCOL"/>
+                                <property name="javaFieldType" value="java.lang.Long"/>
+                                <property name="javaFieldName" value="longcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.REAL"/>
+                                </property>
+                                <property name="databaseFieldName" value="FLOATCOL"/>
+                                <property name="javaFieldType" value="java.lang.Float"/>
+                                <property name="javaFieldName" value="floatcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DOUBLE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DOUBLECOL"/>
+                                <property name="javaFieldType" value="java.lang.Double"/>
+                                <property name="javaFieldName" value="doublecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DOUBLE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DOUBLECOL2"/>
+                                <property name="javaFieldType" value="java.lang.Double"/>
+                                <property name="javaFieldName" value="doublecol2"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DECIMAL"/>
+                                </property>
+                                <property name="databaseFieldName" value="BIGDECIMALCOL"/>
+                                <property name="javaFieldType" value="java.math.BigDecimal"/>
+                                <property name="javaFieldName" value="bigdecimalcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.VARCHAR"/>
+                                </property>
+                                <property name="databaseFieldName" value="STRCOL"/>
+                                <property name="javaFieldType" value="java.lang.String"/>
+                                <property name="javaFieldName" value="strcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DATE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DATECOL"/>
+                                <property name="javaFieldType" value="java.sql.Date"/>
+                                <property name="javaFieldName" value="datecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TIME"/>
+                                </property>
+                                <property name="databaseFieldName" value="TIMECOL"/>
+                                <property name="javaFieldType" value="java.sql.Time"/>
+                                <property name="javaFieldName" value="timecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TIMESTAMP"/>
+                                </property>
+                                <property name="databaseFieldName" value="TSCOL"/>
+                                <property name="javaFieldType" value="java.sql.Timestamp"/>
+                                <property name="javaFieldName" value="tscol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.VARBINARY"/>
+                                </property>
+                                <property name="databaseFieldName" value="ARRCOL"/>
+                                <property name="javaFieldType" value="java.lang.Object"/>
+                                <property name="javaFieldName" value="arrcol"/>
+                            </bean>
+                        </list>
+                    </property>
+                </bean>
+                <bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
+                    <property name="databaseSchema" value="PUBLIC"/>
+                    <property name="databaseTable" value="PRIMITIVES"/>
+                    <property name="keyType" value="org.apache.ignite.schema.test.model.PrimitivesKey"/>
+                    <property name="valueType" value="org.apache.ignite.schema.test.model.Primitives"/>
+                    <property name="keyFields">
+                        <list>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="PK"/>
+                                <property name="javaFieldType" value="int"/>
+                                <property name="javaFieldName" value="pk"/>
+                            </bean>
+                        </list>
+                    </property>
+                    <property name="valueFields">
+                        <list>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="PK"/>
+                                <property name="javaFieldType" value="int"/>
+                                <property name="javaFieldName" value="pk"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.BOOLEAN"/>
+                                </property>
+                                <property name="databaseFieldName" value="BOOLCOL"/>
+                                <property name="javaFieldType" value="boolean"/>
+                                <property name="javaFieldName" value="boolcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TINYINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="BYTECOL"/>
+                                <property name="javaFieldType" value="byte"/>
+                                <property name="javaFieldName" value="bytecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.SMALLINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="SHORTCOL"/>
+                                <property name="javaFieldType" value="short"/>
+                                <property name="javaFieldName" value="shortcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.INTEGER"/>
+                                </property>
+                                <property name="databaseFieldName" value="INTCOL"/>
+                                <property name="javaFieldType" value="int"/>
+                                <property name="javaFieldName" value="intcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.BIGINT"/>
+                                </property>
+                                <property name="databaseFieldName" value="LONGCOL"/>
+                                <property name="javaFieldType" value="long"/>
+                                <property name="javaFieldName" value="longcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.REAL"/>
+                                </property>
+                                <property name="databaseFieldName" value="FLOATCOL"/>
+                                <property name="javaFieldType" value="float"/>
+                                <property name="javaFieldName" value="floatcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DOUBLE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DOUBLECOL"/>
+                                <property name="javaFieldType" value="double"/>
+                                <property name="javaFieldName" value="doublecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DOUBLE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DOUBLECOL2"/>
+                                <property name="javaFieldType" value="double"/>
+                                <property name="javaFieldName" value="doublecol2"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DECIMAL"/>
+                                </property>
+                                <property name="databaseFieldName" value="BIGDECIMALCOL"/>
+                                <property name="javaFieldType" value="java.math.BigDecimal"/>
+                                <property name="javaFieldName" value="bigdecimalcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.VARCHAR"/>
+                                </property>
+                                <property name="databaseFieldName" value="STRCOL"/>
+                                <property name="javaFieldType" value="java.lang.String"/>
+                                <property name="javaFieldName" value="strcol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.DATE"/>
+                                </property>
+                                <property name="databaseFieldName" value="DATECOL"/>
+                                <property name="javaFieldType" value="java.sql.Date"/>
+                                <property name="javaFieldName" value="datecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TIME"/>
+                                </property>
+                                <property name="databaseFieldName" value="TIMECOL"/>
+                                <property name="javaFieldType" value="java.sql.Time"/>
+                                <property name="javaFieldName" value="timecol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.TIMESTAMP"/>
+                                </property>
+                                <property name="databaseFieldName" value="TSCOL"/>
+                                <property name="javaFieldType" value="java.sql.Timestamp"/>
+                                <property name="javaFieldName" value="tscol"/>
+                            </bean>
+                            <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                <property name="databaseFieldType">
+                                    <util:constant static-field="java.sql.Types.VARBINARY"/>
+                                </property>
+                                <property name="databaseFieldName" value="ARRCOL"/>
+                                <property name="javaFieldType" value="java.lang.Object"/>
+                                <property name="javaFieldName" value="arrcol"/>
+                            </bean>
+                        </list>
                     </property>
-                    <property name="javaName" value="doublecol"/>
-                    <property name="javaType" value="java.lang.Double"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DOUBLECOL2"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DOUBLE"/>
-                    </property>
-                    <property name="javaName" value="doublecol2"/>
-                    <property name="javaType" value="java.lang.Double"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BIGDECIMALCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DECIMAL"/>
-                    </property>
-                    <property name="javaName" value="bigdecimalcol"/>
-                    <property name="javaType" value="java.math.BigDecimal"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="STRCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.VARCHAR"/>
-                    </property>
-                    <property name="javaName" value="strcol"/>
-                    <property name="javaType" value="java.lang.String"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DATECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DATE"/>
-                    </property>
-                    <property name="javaName" value="datecol"/>
-                    <property name="javaType" value="java.sql.Date"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="TIMECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TIME"/>
-                    </property>
-                    <property name="javaName" value="timecol"/>
-                    <property name="javaType" value="java.sql.Time"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="TSCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TIMESTAMP"/>
-                    </property>
-                    <property name="javaName" value="tscol"/>
-                    <property name="javaType" value="java.sql.Timestamp"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="ARRCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.VARBINARY"/>
-                    </property>
-                    <property name="javaName" value="arrcol"/>
-                    <property name="javaType" value="java.lang.Object"/>
                 </bean>
             </list>
         </property>
-        <property name="queryFields">
-            <map>
+    </bean>
+    <bean class="org.apache.ignite.cache.QueryEntity">
+        <property name="keyType" value="org.apache.ignite.schema.test.model.ObjectsKey"/>
+        <property name="valueType" value="org.apache.ignite.schema.test.model.Objects"/>
+        <property name="fields">
+            <util:map map-class="java.util.LinkedHashMap">
                 <entry key="pk" value="int"/>
                 <entry key="boolcol" value="java.lang.Boolean"/>
                 <entry key="bytecol" value="java.lang.Byte"/>
@@ -185,157 +337,29 @@
                 <entry key="timecol" value="java.sql.Time"/>
                 <entry key="tscol" value="java.sql.Timestamp"/>
                 <entry key="arrcol" value="java.lang.Object"/>
-            </map>
-        </property>
-        <property name="ascendingFields">
-            <map>
-                <entry key="pk" value="int"/>
-            </map>
-        </property>
-    </bean>
-    <bean class="org.apache.ignite.cache.CacheTypeMetadata">
-        <property name="databaseSchema" value="PUBLIC"/>
-        <property name="databaseTable" value="PRIMITIVES"/>
-        <property name="keyType" value="org.apache.ignite.schema.test.model.PrimitivesKey"/>
-        <property name="valueType" value="org.apache.ignite.schema.test.model.Primitives"/>
-        <property name="keyFields">
-            <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="PK"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="pk"/>
-                    <property name="javaType" value="int"/>
-                </bean>
-            </list>
+            </util:map>
         </property>
-        <property name="valueFields">
+        <property name="indexes">
             <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="PK"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="pk"/>
-                    <property name="javaType" value="int"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BOOLCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.BOOLEAN"/>
-                    </property>
-                    <property name="javaName" value="boolcol"/>
-                    <property name="javaType" value="boolean"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BYTECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TINYINT"/>
-                    </property>
-                    <property name="javaName" value="bytecol"/>
-                    <property name="javaType" value="byte"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="SHORTCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.SMALLINT"/>
-                    </property>
-                    <property name="javaName" value="shortcol"/>
-                    <property name="javaType" value="short"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="INTCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.INTEGER"/>
-                    </property>
-                    <property name="javaName" value="intcol"/>
-                    <property name="javaType" value="int"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="LONGCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.BIGINT"/>
-                    </property>
-                    <property name="javaName" value="longcol"/>
-                    <property name="javaType" value="long"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="FLOATCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.REAL"/>
-                    </property>
-                    <property name="javaName" value="floatcol"/>
-                    <property name="javaType" value="float"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DOUBLECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DOUBLE"/>
-                    </property>
-                    <property name="javaName" value="doublecol"/>
-                    <property name="javaType" value="double"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DOUBLECOL2"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DOUBLE"/>
-                    </property>
-                    <property name="javaName" value="doublecol2"/>
-                    <property name="javaType" value="double"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="BIGDECIMALCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DECIMAL"/>
+                <bean class="org.apache.ignite.cache.QueryIndex">
+                    <property name="name" value="PRIMARY_KEY_C"/>
+                    <property name="indexType">
+                        <util:constant static-field="org.apache.ignite.cache.QueryIndexType.SORTED"/>
                     </property>
-                    <property name="javaName" value="bigdecimalcol"/>
-                    <property name="javaType" value="java.math.BigDecimal"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="STRCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.VARCHAR"/>
-                    </property>
-                    <property name="javaName" value="strcol"/>
-                    <property name="javaType" value="java.lang.String"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="DATECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.DATE"/>
+                    <property name="fields">
+                        <map>
+                            <entry key="PK" value="true"/>
+                        </map>
                     </property>
-                    <property name="javaName" value="datecol"/>
-                    <property name="javaType" value="java.sql.Date"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="TIMECOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TIME"/>
-                    </property>
-                    <property name="javaName" value="timecol"/>
-                    <property name="javaType" value="java.sql.Time"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="TSCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.TIMESTAMP"/>
-                    </property>
-                    <property name="javaName" value="tscol"/>
-                    <property name="javaType" value="java.sql.Timestamp"/>
-                </bean>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="ARRCOL"/>
-                    <property name="databaseType">
-                        <util:constant static-field="java.sql.Types.VARBINARY"/>
-                    </property>
-                    <property name="javaName" value="arrcol"/>
-                    <property name="javaType" value="java.lang.Object"/>
                 </bean>
             </list>
         </property>
-        <property name="queryFields">
-            <map>
+    </bean>
+    <bean class="org.apache.ignite.cache.QueryEntity">
+        <property name="keyType" value="org.apache.ignite.schema.test.model.PrimitivesKey"/>
+        <property name="valueType" value="org.apache.ignite.schema.test.model.Primitives"/>
+        <property name="fields">
+            <util:map map-class="java.util.LinkedHashMap">
                 <entry key="pk" value="int"/>
                 <entry key="boolcol" value="boolean"/>
                 <entry key="bytecol" value="byte"/>
@@ -351,12 +375,22 @@
                 <entry key="timecol" value="java.sql.Time"/>
                 <entry key="tscol" value="java.sql.Timestamp"/>
                 <entry key="arrcol" value="java.lang.Object"/>
-            </map>
+            </util:map>
         </property>
-        <property name="ascendingFields">
-            <map>
-                <entry key="pk" value="int"/>
-            </map>
+        <property name="indexes">
+            <list>
+                <bean class="org.apache.ignite.cache.QueryIndex">
+                    <property name="name" value="PRIMARY_KEY_D"/>
+                    <property name="indexType">
+                        <util:constant static-field="org.apache.ignite.cache.QueryIndexType.SORTED"/>
+                    </property>
+                    <property name="fields">
+                        <map>
+                            <entry key="PK" value="true"/>
+                        </map>
+                    </property>
+                </bean>
+            </list>
         </property>
     </bean>
 </beans>
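
For readers mapping the old CacheTypeMetadata format onto the new one: the generated XML above is plain QueryEntity/QueryIndex configuration, so the same thing can be expressed in Java. A minimal hand-written sketch for illustration only (not tool output; it relies on the setters the Spring XML above already uses):

    import java.util.Collections;
    import java.util.LinkedHashMap;
    import org.apache.ignite.cache.QueryEntity;
    import org.apache.ignite.cache.QueryIndex;
    import org.apache.ignite.cache.QueryIndexType;

    public class PrimitivesQueryEntityExample {
        static QueryEntity primitivesEntity() {
            QueryEntity entity = new QueryEntity();

            entity.setKeyType("org.apache.ignite.schema.test.model.PrimitivesKey");
            entity.setValueType("org.apache.ignite.schema.test.model.Primitives");

            // Field order matters for SQL, hence the LinkedHashMap (util:map with map-class above).
            LinkedHashMap<String, String> fields = new LinkedHashMap<>();
            fields.put("pk", "int");
            fields.put("boolcol", "boolean");
            fields.put("bytecol", "byte");
            // ... remaining columns exactly as in the XML above ...
            entity.setFields(fields);

            // Sorted index on the key column; "true" means ascending and replaces
            // the removed "ascendingFields" property.
            QueryIndex idx = new QueryIndex();

            idx.setName("PRIMARY_KEY_D");
            idx.setIndexType(QueryIndexType.SORTED);

            LinkedHashMap<String, Boolean> idxFields = new LinkedHashMap<>();
            idxFields.put("PK", true);
            idx.setFields(idxFields);

            entity.setIndexes(Collections.singletonList(idx));

            return entity;
        }
    }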

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/yardstick/config/ignite-store-config.xml
----------------------------------------------------------------------
diff --git a/modules/yardstick/config/ignite-store-config.xml b/modules/yardstick/config/ignite-store-config.xml
index 35b8e19..031de91 100644
--- a/modules/yardstick/config/ignite-store-config.xml
+++ b/modules/yardstick/config/ignite-store-config.xml
@@ -35,31 +35,31 @@
         <property name="user" value="sa"/>
     </bean>
 
-    <bean id="sampleTypeMetadata" class="org.apache.ignite.cache.CacheTypeMetadata">
+    <bean id="sampleTypeMetadata" class="org.apache.ignite.cache.store.jdbc.JdbcType">
         <property name="databaseTable" value="SAMPLE"/>
         <property name="keyType" value="org.apache.ignite.yardstick.cache.model.SampleKey"/>
-        <property name="valueType" value="org.apache.ignite.yardstick.cache.model.SampleValue"/>
         <property name="keyFields">
             <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="ID"/>
-                    <property name="databaseType">
+                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                    <property name="databaseFieldType">
                         <util:constant static-field="java.sql.Types.INTEGER"/>
                     </property>
-                    <property name="javaName" value="id"/>
-                    <property name="javaType" value="int"/>
+                    <property name="databaseFieldName" value="ID"/>
+                    <property name="javaFieldType" value="int"/>
+                    <property name="javaFieldName" value="id"/>
                 </bean>
             </list>
         </property>
+        <property name="valueType" value="org.apache.ignite.yardstick.cache.model.SampleValue"/>
         <property name="valueFields">
             <list>
-                <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                    <property name="databaseName" value="VALUE"/>
-                    <property name="databaseType">
+                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                    <property name="databaseFieldType">
                         <util:constant static-field="java.sql.Types.INTEGER"/>
                     </property>
-                    <property name="javaName" value="id"/>
-                    <property name="javaType" value="int"/>
+                    <property name="databaseFieldName" value="VALUE"/>
+                    <property name="javaFieldType" value="int"/>
+                    <property name="javaFieldName" value="id"/>
                 </bean>
             </list>
         </property>
@@ -79,18 +79,19 @@
 
                     <property name="swapEnabled" value="false"/>
 
-                    <property name="typeMetadata">
-                        <list>
-                            <ref bean="sampleTypeMetadata"/>
-                        </list>
-                    </property>
-
                     <property name="cacheStoreFactory">
                         <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
                             <property name="dataSourceBean" value="storeDataSource"/>
                             <property name="dialect">
                                 <bean class="org.apache.ignite.cache.store.jdbc.dialect.H2Dialect"/>
                             </property>
+                            <property name="types">
+                                <list>
+                                    <bean parent="sampleTypeMetadata">
+                                        <property name="cacheName" value="atomic"/>
+                                    </bean>
+                                </list>
+                            </property>
                         </bean>
                     </property>
                 </bean>
@@ -104,18 +105,19 @@
 
                     <property name="swapEnabled" value="false"/>
 
-                    <property name="typeMetadata">
-                        <list>
-                            <ref bean="sampleTypeMetadata"/>
-                        </list>
-                    </property>
-
                     <property name="cacheStoreFactory">
                         <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
                             <property name="dataSourceBean" value="storeDataSource"/>
                             <property name="dialect">
                                 <bean class="org.apache.ignite.cache.store.jdbc.dialect.H2Dialect"/>
                             </property>
+                            <property name="types">
+                                <list>
+                                    <bean parent="sampleTypeMetadata">
+                                        <property name="cacheName" value="tx"/>
+                                    </bean>
+                                </list>
+                            </property>
                         </bean>
                     </property>
                 </bean>
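
The store configuration follows the same migration: CacheTypeMetadata plus the cache-level "typeMetadata" property is replaced by JdbcType descriptors registered on CacheJdbcPojoStoreFactory via "types", with the cache name now carried by the type itself. A rough Java equivalent of the XML above (a sketch only; it uses the setters the Spring config implies, and the varargs setKeyFields/setValueFields/setTypes signatures are an assumption):

    import java.sql.Types;
    import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
    import org.apache.ignite.cache.store.jdbc.JdbcType;
    import org.apache.ignite.cache.store.jdbc.JdbcTypeField;
    import org.apache.ignite.cache.store.jdbc.dialect.H2Dialect;

    JdbcType sampleType = new JdbcType();

    sampleType.setCacheName("atomic"); // "tx" for the transactional cache.
    sampleType.setDatabaseTable("SAMPLE");
    sampleType.setKeyType("org.apache.ignite.yardstick.cache.model.SampleKey");
    sampleType.setValueType("org.apache.ignite.yardstick.cache.model.SampleValue");

    JdbcTypeField keyField = new JdbcTypeField();

    keyField.setDatabaseFieldType(Types.INTEGER);
    keyField.setDatabaseFieldName("ID");
    keyField.setJavaFieldType(int.class);
    keyField.setJavaFieldName("id");

    sampleType.setKeyFields(keyField);

    JdbcTypeField valField = new JdbcTypeField();

    valField.setDatabaseFieldType(Types.INTEGER);
    valField.setDatabaseFieldName("VALUE");
    valField.setJavaFieldType(int.class);
    valField.setJavaFieldName("id"); // Java field name as in the XML above.

    sampleType.setValueFields(valField);

    CacheJdbcPojoStoreFactory<Object, Object> storeFactory = new CacheJdbcPojoStoreFactory<>();

    storeFactory.setDataSourceBean("storeDataSource");
    storeFactory.setDialect(new H2Dialect());
    storeFactory.setTypes(sampleType);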


[07/25] ignite git commit: ignite-yardstick: added debug info into IgniteTransactionalWriteInvokeBenchmark

Posted by ag...@apache.org.
ignite-yardstick: added debug info into IgniteTransactionalWriteInvokeBenchmark


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/fa7a4bce
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/fa7a4bce
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/fa7a4bce

Branch: refs/heads/ignite-1282
Commit: fa7a4bce65421da4f86c72e78850d987278f8e8f
Parents: d71f612
Author: ashutak <as...@gridgain.com>
Authored: Mon Nov 23 16:26:29 2015 +0300
Committer: ashutak <as...@gridgain.com>
Committed: Mon Nov 23 16:26:29 2015 +0300

----------------------------------------------------------------------
 .../IgniteAtomicInvokeRetryBenchmark.java       |   2 +-
 .../failover/IgniteConsistencyException.java    |  64 +++++++++
 ...IgniteTransactionalInvokeRetryBenchmark.java |   2 +-
 ...IgniteTransactionalWriteInvokeBenchmark.java | 135 ++++++++++++++-----
 .../IgniteTransactionalWriteReadBenchmark.java  |   2 +-
 5 files changed, 171 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/fa7a4bce/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteAtomicInvokeRetryBenchmark.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteAtomicInvokeRetryBenchmark.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteAtomicInvokeRetryBenchmark.java
index c0567ef..6e65746 100644
--- a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteAtomicInvokeRetryBenchmark.java
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteAtomicInvokeRetryBenchmark.java
@@ -110,7 +110,7 @@ public class IgniteAtomicInvokeRetryBenchmark extends IgniteFailoverAbstractBenc
 
                                 }
 
-                                throw new IllegalStateException("Cache and local map are in inconsistent state " +
+                                throw new IgniteConsistencyException("Cache and local map are in inconsistent state " +
                                     "[badKeys=" + badCacheEntries.keySet() + ']');
                             }
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/fa7a4bce/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteConsistencyException.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteConsistencyException.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteConsistencyException.java
new file mode 100644
index 0000000..e1fcbfa
--- /dev/null
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteConsistencyException.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.yardstick.cache.failover;
+
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Ignite consistency exception.
+ */
+public class IgniteConsistencyException extends RuntimeException {
+    /** */
+    private static final long serialVersionUID = 0;
+
+    /**
+     * Create empty exception.
+     */
+    public IgniteConsistencyException() {
+        // No-op.
+    }
+
+    /**
+     * Creates new exception with given error message.
+     *
+     * @param msg Error message.
+     */
+    public IgniteConsistencyException(String msg) {
+        super(msg);
+    }
+
+    /**
+     * Creates new consistency exception with given throwable as a cause and
+     * source of error message.
+     *
+     * @param cause Non-null throwable cause.
+     */
+    public IgniteConsistencyException(Throwable cause) {
+        this(cause.getMessage(), cause);
+    }
+
+    /**
+     * Creates new exception with given error message and optional nested exception.
+     *
+     * @param msg Error message.
+     * @param cause Optional nested exception (can be {@code null}).
+     */
+    public IgniteConsistencyException(String msg, @Nullable Throwable cause) {
+        super(msg, cause);
+    }
+}
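
The dedicated exception type lets the failover driver tell a genuine data-consistency violation apart from ordinary infrastructure failures, which are simply retried. A hypothetical catch-side sketch (not part of this commit; validateCache() and retryRound() are made-up names):

    try {
        validateCache(); // Runs a validation round, as the benchmarks below do.
    }
    catch (IgniteConsistencyException e) {
        // Inconsistent data detected: surface it and stop the run, do not retry.
        throw e;
    }
    catch (RuntimeException e) {
        // Node restart, timeout, etc.: handled by the usual failover retry logic.
        retryRound();
    }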

http://git-wip-us.apache.org/repos/asf/ignite/blob/fa7a4bce/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalInvokeRetryBenchmark.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalInvokeRetryBenchmark.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalInvokeRetryBenchmark.java
index f8a1689..16b0959 100644
--- a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalInvokeRetryBenchmark.java
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalInvokeRetryBenchmark.java
@@ -122,7 +122,7 @@ public class IgniteTransactionalInvokeRetryBenchmark extends IgniteFailoverAbstr
                                     }
                                 }
 
-                                throw new IllegalStateException("Cache and local map are in inconsistent state.");
+                                throw new IgniteConsistencyException("Cache and local map are in inconsistent state.");
                             }
 
                             println("Cache validation successfully finished in "

http://git-wip-us.apache.org/repos/asf/ignite/blob/fa7a4bce/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteInvokeBenchmark.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteInvokeBenchmark.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteInvokeBenchmark.java
index 2c4050b..a52ea78 100644
--- a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteInvokeBenchmark.java
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteInvokeBenchmark.java
@@ -17,14 +17,25 @@
 
 package org.apache.ignite.yardstick.cache.failover;
 
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import javax.cache.processor.EntryProcessorException;
 import javax.cache.processor.MutableEntry;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteDataStreamer;
 import org.apache.ignite.cache.CacheEntryProcessor;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.cluster.ClusterNode;
+import org.apache.ignite.internal.IgniteKernal;
+import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLocalPartition;
+import org.apache.ignite.internal.util.typedef.F;
+import org.yardstickframework.BenchmarkConfiguration;
 
 import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
 import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
@@ -44,12 +55,40 @@ import static org.yardstickframework.BenchmarkUtils.println;
  * </ul>
  */
 public class IgniteTransactionalWriteInvokeBenchmark extends IgniteFailoverAbstractBenchmark<String, Long> {
+    /** */
+    private static final Long INITIAL_VALUE = 1L;
+
     /** {@inheritDoc} */
-    @Override public boolean test(Map<Object, Object> ctx) throws Exception {
-        final int k = nextRandom(args.range());
+    @Override public void setUp(BenchmarkConfiguration cfg) throws Exception {
+        super.setUp(cfg);
 
         assert args.keysCount() > 0 : "Count of keys: " + args.keysCount();
 
+        println(cfg, "Populating data...");
+
+        long start = System.nanoTime();
+
+        if (cfg.memberId() == 0) {
+            try (IgniteDataStreamer<String, Long> dataLdr = ignite().dataStreamer(cacheName())) {
+                for (int k = 0; k < args.range() && !Thread.currentThread().isInterrupted(); k++) {
+                    dataLdr.addData("key-" + k + "-master", INITIAL_VALUE);
+
+                    for (int i = 0; i < args.keysCount(); i++)
+                        dataLdr.addData("key-" + k + "-" + i, INITIAL_VALUE);
+
+                    if (k % 100000 == 0)
+                        println(cfg, "Populated accounts: " + k);
+                }
+            }
+        }
+
+        println(cfg, "Finished populating data in " + ((System.nanoTime() - start) / 1_000_000) + " ms.");
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean test(Map<Object, Object> ctx) throws Exception {
+        final int k = nextRandom(args.range());
+
         final String[] keys = new String[args.keysCount()];
 
         final String masterKey = "key-" + k + "-master";
@@ -59,8 +98,10 @@ public class IgniteTransactionalWriteInvokeBenchmark extends IgniteFailoverAbstr
 
         final int scenario = nextRandom(2);
 
-        return doInTransaction(ignite().transactions(), PESSIMISTIC, REPEATABLE_READ, new Callable<Boolean>() {
-            @Override public Boolean call() throws Exception {
+        final Set<String> badKeys = new LinkedHashSet<>();
+
+        doInTransaction(ignite().transactions(), PESSIMISTIC, REPEATABLE_READ, new Callable<Void>() {
+            @Override public Void call() throws Exception {
                 final int timeout = args.cacheOperationTimeoutMillis();
 
                 switch (scenario) {
@@ -81,46 +122,40 @@ public class IgniteTransactionalWriteInvokeBenchmark extends IgniteFailoverAbstr
 
                         Set<Long> values = new HashSet<>(map.values());
 
-                        if (values.size() != 1) {
-                            // Print all usefull information and finish.
-                            println(cfg, "Got different values for keys [map=" + map + "]");
-
-                            println(cfg, "Cache content:");
-
-                            for (int k = 0; k < args.range(); k++) {
-                                for (int i = 0; i < args.keysCount(); i++) {
-                                    String key = "key-" + k + "-" + i;
-
-                                    asyncCache.get(key);
-                                    Long val = asyncCache.<Long>future().get(timeout);
-
-                                    if (val != null)
-                                        println(cfg, "Entry [key=" + key + ", val=" + val + "]");
-                                }
-                            }
-
-                            throw new IllegalStateException("Found different values for keys (see above information).");
-                        }
+                        if (values.size() != 1)
+                            throw new IgniteConsistencyException("Found different values for keys [map="+map+"]");
 
                         break;
                     case 1: // Invoke scenario.
                         asyncCache.get(masterKey);
                         Long val = asyncCache.<Long>future().get(timeout);
 
-                        asyncCache.put(masterKey, val == null ? 0 : val + 1);
+                        if (val == null)
+                            badKeys.add(masterKey);
+
+                        asyncCache.put(masterKey, val == null ? -1 : val + 1);
                         asyncCache.future().get(timeout);
 
                         for (String key : keys) {
-                            asyncCache.invoke(key, new IncrementCacheEntryProcessor());
-                            asyncCache.future().get(timeout);
+                            asyncCache.invoke(key, new IncrementCacheEntryProcessor(), cacheName());
+                            Object o = asyncCache.future().get(timeout);
+
+                            if (o != null)
+                                badKeys.add(key);
                         }
 
                         break;
                 }
 
-                return true;
+                return null;
             }
         });
+
+        if (!F.isEmpty(badKeys))
+            throw new IgniteConsistencyException("Found unexpected null-value(s) for the following " +
+                "key(s) (look for debug information on server nodes): " + badKeys);
+
+        return true;
     }
 
     /** {@inheritDoc} */
@@ -130,14 +165,52 @@ public class IgniteTransactionalWriteInvokeBenchmark extends IgniteFailoverAbstr
 
     /**
      */
-    private static class IncrementCacheEntryProcessor implements CacheEntryProcessor<String, Long, Void> {
+    private static class IncrementCacheEntryProcessor implements CacheEntryProcessor<String, Long, Object> {
         /** */
         private static final long serialVersionUID = 0;
 
         /** {@inheritDoc} */
-        @Override public Void process(MutableEntry<String, Long> entry,
+        @Override public Object process(MutableEntry<String, Long> entry,
             Object... arguments) throws EntryProcessorException {
-            entry.setValue(entry.getValue() == null ? 0 : entry.getValue() + 1);
+            if (entry.getValue() == null) {
+                String cacheName = (String)arguments[0];
+
+                IgniteKernal kernal = (IgniteKernal)entry.unwrap(Ignite.class);
+
+                Affinity<String> aff = kernal.affinity(cacheName);
+
+                final int partIdx = aff.partition(entry.getKey());
+
+                final Collection<ClusterNode> nodes = aff.mapKeyToPrimaryAndBackups(entry.getKey());
+
+                List<GridDhtLocalPartition> locPartitions = kernal.cachex(cacheName).context().topology().
+                    localPartitions();
+
+                GridDhtLocalPartition part = null;
+
+                for (GridDhtLocalPartition p : locPartitions) {
+                    if (p.id() == partIdx) {
+                        part = p;
+
+                        break;
+                    }
+                }
+
+                kernal.log().warning("Found unexpected null-value, debug info:"
+                        + "\n    entry=" + entry
+                        + "\n    key=" + entry.getKey()
+                        + "\n    locNodeId=" + kernal.cluster().localNode().id()
+                        + "\n    primaryAndBackupsNodes=" + nodes
+                        + "\n    part=" + part
+                        + "\n    partIdx=" + partIdx
+                        + "\n    locParts=" + locPartitions
+                        + "\n    allPartMap=" + kernal.cachex(cacheName).context().topology().partitionMap(true)
+                );
+
+                return new Object(); // non-null value.
+            }
+
+            entry.setValue(entry.getValue() + 1);
 
             return null;
         }
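
The shape of the change above is worth calling out: instead of silently resetting a missing value, the entry processor now logs affinity and partition diagnostics on the server node and returns a non-null marker, and the caller collects those keys into badKeys and fails with IgniteConsistencyException only after the transaction completes. Stripped of the Ignite internals, the collect-then-fail pattern looks roughly like this (simplified, hypothetical names):

    Set<String> badKeys = new LinkedHashSet<>();

    for (String key : keys) {
        // The processor returns a non-null marker when it finds an unexpected null value.
        Object marker = cache.invoke(key, new IncrementProcessor());

        if (marker != null)
            badKeys.add(key);
    }

    if (!badKeys.isEmpty())
        throw new IgniteConsistencyException("Found unexpected null-value(s) for keys: " + badKeys);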

http://git-wip-us.apache.org/repos/asf/ignite/blob/fa7a4bce/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteReadBenchmark.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteReadBenchmark.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteReadBenchmark.java
index f278e8a..c4314ed 100644
--- a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteReadBenchmark.java
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/cache/failover/IgniteTransactionalWriteReadBenchmark.java
@@ -80,7 +80,7 @@ public class IgniteTransactionalWriteReadBenchmark extends IgniteFailoverAbstrac
                         }
                     }
 
-                    throw new IllegalStateException("Found different values for keys (see above information).");
+                    throw new IgniteConsistencyException("Found different values for keys (see above information).");
                 }
 
                 final Long oldVal = map.get(keys[0]);


[15/25] ignite git commit: Merge branch 'ignite-1.5' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282

Posted by ag...@apache.org.
Merge branch 'ignite-1.5' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/a1294394
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/a1294394
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/a1294394

Branch: refs/heads/ignite-1282
Commit: a129439413b8ad440c70d339b5bc1c4b250d1232
Parents: 01c24e7 e4109f9
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 09:52:05 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 09:52:05 2015 +0300

----------------------------------------------------------------------
 examples/schema-import/bin/db-init.sql          |    3 +-
 .../org/apache/ignite/schema/CacheConfig.java   |    7 +-
 .../java/org/apache/ignite/schema/Demo.java     |   20 +-
 .../org/apache/ignite/cache/QueryIndex.java     |   53 +-
 .../store/jdbc/CacheAbstractJdbcStore.java      |  638 +++++++----
 .../store/jdbc/CacheJdbcBlobStoreFactory.java   |   14 +-
 .../cache/store/jdbc/CacheJdbcPojoStore.java    |  444 +++++---
 .../store/jdbc/CacheJdbcPojoStoreFactory.java   |  277 ++++-
 .../ignite/cache/store/jdbc/JdbcType.java       |  255 +++++
 .../cache/store/jdbc/JdbcTypeDefaultHasher.java |   43 +
 .../ignite/cache/store/jdbc/JdbcTypeField.java  |  172 +++
 .../ignite/cache/store/jdbc/JdbcTypeHasher.java |   34 +
 .../internal/GridEventConsumeHandler.java       |    6 +-
 .../internal/GridMessageListenHandler.java      |    6 +-
 .../internal/portable/BinaryObjectImpl.java     |   32 +-
 .../portable/BinaryObjectOffheapImpl.java       |   27 +-
 .../internal/portable/BinaryReaderExImpl.java   | 1015 ++----------------
 .../portable/BinaryReaderHandlesHolder.java     |   46 +
 .../portable/BinaryReaderHandlesHolderImpl.java |   44 +
 .../portable/GridPortableMarshaller.java        |   23 +-
 .../portable/PortableClassDescriptor.java       |    8 +-
 .../ignite/internal/portable/PortableUtils.java |  861 ++++++++++++++-
 .../portable/builder/PortableBuilderReader.java |   21 +-
 .../processors/cache/GridCacheMapEntry.java     |    3 +-
 .../continuous/CacheContinuousQueryHandler.java |    6 +-
 .../continuous/GridContinuousHandler.java       |    6 +-
 .../continuous/GridContinuousProcessor.java     |   50 +-
 .../platform/PlatformContextImpl.java           |    4 +-
 .../processors/query/GridQueryProcessor.java    |    6 +-
 .../ignite/internal/visor/cache/VisorCache.java |    4 +-
 .../CacheJdbcPojoStoreAbstractSelfTest.java     |  395 +++++++
 ...dbcPojoStoreOptimizedMarshallerSelfTest.java |   31 +
 ...JdbcPojoStorePortableMarshallerSelfTest.java |   85 ++
 .../store/jdbc/CacheJdbcPojoStoreTest.java      |  200 ++--
 ...eJdbcStoreAbstractMultithreadedSelfTest.java |    2 +-
 ...chePartitionedAtomicSetFailoverSelfTest.java |    5 +
 ...ContinuousQueryFailoverAbstractSelfTest.java |    2 +-
 .../continuous/GridEventConsumeSelfTest.java    |    2 +-
 .../platform/PlatformComputeEchoTask.java       |   18 +
 .../ignite/testsuites/IgniteCacheTestSuite.java |    6 +-
 .../Binary/BinarySelfTest.cs                    |   35 +
 .../Compute/ComputeApiTest.cs                   |   51 +-
 .../Impl/Binary/BinarySystemHandlers.cs         |    2 +-
 .../Impl/Binary/BinaryUtils.cs                  |   36 +-
 .../Impl/Binary/BinaryWriter.cs                 |   32 +-
 modules/schema-import/README.txt                |  176 +--
 .../ignite/schema/generator/CodeGenerator.java  |  281 ++---
 .../ignite/schema/generator/XmlGenerator.java   |  101 +-
 .../apache/ignite/schema/model/IndexItem.java   |   54 -
 .../ignite/schema/model/PojoDescriptor.java     |   72 +-
 .../ignite/schema/model/SchemaDescriptor.java   |    6 +-
 .../schema/parser/DatabaseMetadataParser.java   |   12 +-
 .../apache/ignite/schema/parser/DbTable.java    |   37 +-
 .../parser/dialect/DatabaseMetadataDialect.java |   32 +-
 .../parser/dialect/JdbcMetadataDialect.java     |   22 +-
 .../parser/dialect/OracleMetadataDialect.java   |   24 +-
 .../apache/ignite/schema/ui/ModalDialog.java    |    6 +-
 .../ignite/schema/ui/SchemaImportApp.java       |   13 +-
 .../schema/test/AbstractSchemaImportTest.java   |    4 +-
 .../schema/test/model/ignite-type-metadata.xml  |  610 ++++++-----
 .../yardstick/config/ignite-store-config.xml    |   50 +-
 .../IgniteAtomicInvokeRetryBenchmark.java       |    2 +-
 .../failover/IgniteConsistencyException.java    |   64 ++
 ...IgniteTransactionalInvokeRetryBenchmark.java |    2 +-
 ...IgniteTransactionalWriteInvokeBenchmark.java |  135 ++-
 .../IgniteTransactionalWriteReadBenchmark.java  |    2 +-
 66 files changed, 4336 insertions(+), 2399 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
----------------------------------------------------------------------
diff --cc modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
index 68b83ea,3d6e266..900835a
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
@@@ -140,14 -140,9 +140,14 @@@ public interface GridContinuousHandler 
      /**
       * @return {@code True} if for continuous queries.
       */
-     public boolean isForQuery();
+     public boolean isQuery();
  
      /**
 +     * @return {@code True} if Ignite Binary objects should be passed to the listener and filter.
 +     */
 +    public boolean keepBinary();
 +
 +    /**
       * @return Cache name if this is a continuous query handler.
       */
      public String cacheName();
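
The new keepBinary() flag on the handler mirrors what users see on the public API: when a continuous query is registered through a cache obtained via withKeepBinary(), the listener and filter receive BinaryObject instances instead of deserialized values. A minimal user-facing sketch (standard public API; the cache name is made up):

    IgniteCache<Integer, BinaryObject> cache = ignite.<Integer, BinaryObject>cache("mycache").withKeepBinary();

    ContinuousQuery<Integer, BinaryObject> qry = new ContinuousQuery<>();

    qry.setLocalListener(evts -> {
        for (CacheEntryEvent<? extends Integer, ? extends BinaryObject> e : evts)
            // Values arrive in binary form; no deserialization happens on the listener side.
            System.out.println(e.getKey() + " -> " + e.getValue().type().typeName());
    });

    // Keep the cursor open for as long as updates should be delivered.
    QueryCursor<Cache.Entry<Integer, BinaryObject>> cur = cache.query(qry);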

http://git-wip-us.apache.org/repos/asf/ignite/blob/a1294394/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java
----------------------------------------------------------------------


[14/25] ignite git commit: Merge remote-tracking branch 'origin/ignite-1.5' into ignite-1.5

Posted by ag...@apache.org.
Merge remote-tracking branch 'origin/ignite-1.5' into ignite-1.5


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/e4109f98
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/e4109f98
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/e4109f98

Branch: refs/heads/ignite-1282
Commit: e4109f9800baa5b0783755cf537c46000ffd4305
Parents: fe53753 aee7b75
Author: AKuznetsov <ak...@gridgain.com>
Authored: Mon Nov 23 22:08:35 2015 +0700
Committer: AKuznetsov <ak...@gridgain.com>
Committed: Mon Nov 23 22:08:35 2015 +0700

----------------------------------------------------------------------
 .../internal/GridEventConsumeHandler.java       |    6 +-
 .../internal/GridMessageListenHandler.java      |    6 +-
 .../internal/portable/BinaryObjectImpl.java     |   32 +-
 .../portable/BinaryObjectOffheapImpl.java       |   27 +-
 .../internal/portable/BinaryReaderExImpl.java   | 1015 ++----------------
 .../portable/BinaryReaderHandlesHolder.java     |   46 +
 .../portable/BinaryReaderHandlesHolderImpl.java |   44 +
 .../portable/GridPortableMarshaller.java        |   23 +-
 .../portable/PortableClassDescriptor.java       |    8 +-
 .../ignite/internal/portable/PortableUtils.java |  861 ++++++++++++++-
 .../portable/builder/PortableBuilderReader.java |   21 +-
 .../processors/cache/GridCacheMapEntry.java     |    3 +-
 .../continuous/CacheContinuousQueryHandler.java |    7 +-
 .../continuous/GridContinuousHandler.java       |    6 +-
 .../continuous/GridContinuousProcessor.java     |   50 +-
 .../platform/PlatformContextImpl.java           |    4 +-
 ...chePartitionedAtomicSetFailoverSelfTest.java |    5 +
 ...ContinuousQueryFailoverAbstractSelfTest.java |    2 +-
 .../continuous/GridEventConsumeSelfTest.java    |    2 +-
 .../IgniteAtomicInvokeRetryBenchmark.java       |    2 +-
 .../failover/IgniteConsistencyException.java    |   64 ++
 ...IgniteTransactionalInvokeRetryBenchmark.java |    2 +-
 ...IgniteTransactionalWriteInvokeBenchmark.java |  135 ++-
 .../IgniteTransactionalWriteReadBenchmark.java  |    2 +-
 24 files changed, 1312 insertions(+), 1061 deletions(-)
----------------------------------------------------------------------



[24/25] ignite git commit: ignite-1282 - Debugging failover.

Posted by ag...@apache.org.
ignite-1282 - Debugging failover.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/4844b3ed
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/4844b3ed
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/4844b3ed

Branch: refs/heads/ignite-1282
Commit: 4844b3edafe2ec3d5eaf93ce56a87c45e2b8e950
Parents: 6dc6ffe
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 14:41:08 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 14:41:08 2015 +0300

----------------------------------------------------------------------
 .../ignite/internal/processors/cache/GridCacheIoManager.java      | 2 ++
 .../processors/cache/distributed/dht/GridDhtTxFinishFuture.java   | 1 +
 .../cache/distributed/dht/GridPartitionedSingleGetFuture.java     | 3 ++-
 .../distributed/dht/IgniteCachePutRetryAbstractSelfTest.java      | 2 ++
 4 files changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/4844b3ed/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheIoManager.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheIoManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheIoManager.java
index 9afbca8..7b1d749 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheIoManager.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheIoManager.java
@@ -145,6 +145,8 @@ public class GridCacheIoManager extends GridCacheSharedManagerAdapter {
             }
 
             if (fut != null && !fut.isDone()) {
+                U.debug(log, "<> Will wait for affinity ready future [fut=" + fut + ", msg=" + msg + ']');
+
                 fut.listen(new CI1<IgniteInternalFuture<?>>() {
                     @Override public void apply(IgniteInternalFuture<?> t) {
                         cctx.kernalContext().closure().runLocalSafe(new Runnable() {

http://git-wip-us.apache.org/repos/asf/ignite/blob/4844b3ed/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxFinishFuture.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxFinishFuture.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxFinishFuture.java
index e8ef5d4..c4a90b1 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxFinishFuture.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxFinishFuture.java
@@ -474,6 +474,7 @@ public final class GridDhtTxFinishFuture<K, V> extends GridCompoundIdentityFutur
         });
 
         return S.toString(GridDhtTxFinishFuture.class, this,
+            "xidVer", tx.xidVersion(),
             "innerFuts", futs,
             "super", super.toString());
     }

http://git-wip-us.apache.org/repos/asf/ignite/blob/4844b3ed/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridPartitionedSingleGetFuture.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridPartitionedSingleGetFuture.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridPartitionedSingleGetFuture.java
index f276cac..a5e5d53 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridPartitionedSingleGetFuture.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridPartitionedSingleGetFuture.java
@@ -323,7 +323,7 @@ public class GridPartitionedSingleGetFuture extends GridFutureAdapter<Object> im
             GridDhtCacheAdapter colocated = cctx.dht();
 
             while (true) {
-                GridCacheEntryEx entry;
+                GridCacheEntryEx entry = null;
 
                 try {
                     entry = colocated.context().isSwapOrOffheapEnabled() ? colocated.entryEx(key) :
@@ -401,6 +401,7 @@ public class GridPartitionedSingleGetFuture extends GridFutureAdapter<Object> im
                     return null;
                 }
                 catch (GridCacheEntryRemovedException ignored) {
+                    U.debug(log, ">>>>>>>>> " + entry);
                     // No-op, will retry.
                 }
             }

http://git-wip-us.apache.org/repos/asf/ignite/blob/4844b3ed/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/IgniteCachePutRetryAbstractSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/IgniteCachePutRetryAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/IgniteCachePutRetryAbstractSelfTest.java
index ee28cf9..ce09a64 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/IgniteCachePutRetryAbstractSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/IgniteCachePutRetryAbstractSelfTest.java
@@ -122,6 +122,8 @@ public abstract class IgniteCachePutRetryAbstractSelfTest extends GridCommonAbst
 
         cfg.setSwapSpaceSpi(new GridTestSwapSpaceSpi());
 
+        cfg.setIncludeEventTypes(new int[0]);
+
         return cfg;
     }
 


[19/25] ignite git commit: IGNITE-1983: .NET: Fixed continuous query tests.

Posted by ag...@apache.org.
IGNITE-1983: .NET: Fixed continuous query tests.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/512fe6b0
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/512fe6b0
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/512fe6b0

Branch: refs/heads/ignite-1282
Commit: 512fe6b076331a3d187e1e0277f6bd565d8350bf
Parents: dcbfbd2
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Tue Nov 24 11:52:06 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Tue Nov 24 11:52:06 2015 +0300

----------------------------------------------------------------------
 .../Continuous/ContinuousQueryAbstractTest.cs   | 29 ++++++++++++--------
 1 file changed, 18 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/512fe6b0/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
index 720483a..bdca918 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
@@ -308,7 +308,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 // Put from local node.
                 int key1 = PrimaryKey(cache1);
                 cache1.GetAndPut(key1, Entry(key1));
-                CheckFilterSingle(key1, null, Entry(key1), !loc);
+                CheckFilterSingle(key1, null, Entry(key1));
                 CheckCallbackSingle(key1, null, Entry(key1));
 
                 // Put from remote node.
@@ -322,7 +322,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 }
                 else
                 {
-                    CheckFilterSingle(key2, null, Entry(key2), true);
+                    CheckFilterSingle(key2, null, Entry(key2));
                     CheckCallbackSingle(key2, null, Entry(key2));
                 }
 
@@ -330,7 +330,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
 
                 // Ignored put from local node.
                 cache1.GetAndPut(key1, Entry(key1 + 1));
-                CheckFilterSingle(key1, Entry(key1), Entry(key1 + 1), !loc);
+                CheckFilterSingle(key1, Entry(key1), Entry(key1 + 1));
                 CheckNoCallback(100);
 
                 // Ignored put from remote node.
@@ -339,7 +339,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 if (loc)
                     CheckNoFilter(100);
                 else
-                    CheckFilterSingle(key2, Entry(key2), Entry(key2 + 1), true);
+                    CheckFilterSingle(key2, Entry(key2), Entry(key2 + 1));
 
                 CheckNoCallback(100);
             }
@@ -621,6 +621,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                     .Deserialize<BinarizableEntry>());
 
                 // 2. Remote put.
+                ClearEvents();
                 cache1.GetAndPut(PrimaryKey(cache2), Entry(2));
 
                 Assert.IsTrue(FILTER_EVTS.TryTake(out filterEvt, 500));
@@ -868,15 +869,10 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// <param name="expKey">Expected key.</param>
         /// <param name="expOldVal">Expected old value.</param>
         /// <param name="expVal">Expected value.</param>
-        /// <param name="hasBackup">Whether there is a backup node to check..</param>
-        private void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, 
-            bool hasBackup = false)
+        private void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal)
         {
             CheckFilterSingle(expKey, expOldVal, expVal, 1000);
-
-            // Filter is called on each cache node (primary and backup)
-            if (hasBackup)
-                CheckFilterSingle(expKey, expOldVal, expVal, 1000);
+            ClearEvents();
         }
 
         /// <summary>
@@ -895,6 +891,17 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
             Assert.AreEqual(expKey, evt.entry.Key);
             Assert.AreEqual(expOldVal, evt.entry.OldValue);
             Assert.AreEqual(expVal, evt.entry.Value);
+
+            ClearEvents();
+        }
+
+        /// <summary>
+        /// Clears the events collection.
+        /// </summary>
+        private static void ClearEvents()
+        {
+            while (FILTER_EVTS.Count > 0)
+                FILTER_EVTS.Take();
         }
 
         /// <summary>


[21/25] ignite git commit: IGNITE-1986: C++ examples VS project file fixed.

Posted by ag...@apache.org.
IGNITE-1986: C++ examples VS project file fixed.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/ca0de93a
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/ca0de93a
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/ca0de93a

Branch: refs/heads/ignite-1282
Commit: ca0de93a9ec38615321cc16e01cdcf9f0698aaf0
Parents: 1136933
Author: isapego <is...@gridgain.com>
Authored: Tue Nov 24 12:24:41 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Tue Nov 24 12:24:41 2015 +0300

----------------------------------------------------------------------
 .../platforms/cpp/examples/project/vs/ignite-examples.vcxproj  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/ca0de93a/modules/platforms/cpp/examples/project/vs/ignite-examples.vcxproj
----------------------------------------------------------------------
diff --git a/modules/platforms/cpp/examples/project/vs/ignite-examples.vcxproj b/modules/platforms/cpp/examples/project/vs/ignite-examples.vcxproj
index 286a4ea..b04bfb9 100644
--- a/modules/platforms/cpp/examples/project/vs/ignite-examples.vcxproj
+++ b/modules/platforms/cpp/examples/project/vs/ignite-examples.vcxproj
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup Label="ProjectConfigurations">
     <ProjectConfiguration Include="Release|Win32">
       <Configuration>Release</Configuration>
@@ -19,14 +19,14 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>false</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v100</PlatformToolset>
     <WholeProgramOptimization>true</WholeProgramOptimization>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>false</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v100</PlatformToolset>
     <WholeProgramOptimization>true</WholeProgramOptimization>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>


[25/25] ignite git commit: Merge branch 'ignite-1282' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282

Posted by ag...@apache.org.
Merge branch 'ignite-1282' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/76803923
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/76803923
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/76803923

Branch: refs/heads/ignite-1282
Commit: 76803923dda7fb346e273aca5079352532b586dd
Parents: 4844b3e b751843
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 14:41:22 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 14:41:22 2015 +0300

----------------------------------------------------------------------
 .../scalar/examples/ScalarCacheExample.scala     | 19 +++++++++++++++----
 .../test/resources/spring-ping-pong-partner.xml  | 18 ------------------
 2 files changed, 15 insertions(+), 22 deletions(-)
----------------------------------------------------------------------



[10/25] ignite git commit: IGNITE-1972: Optimized "unmarshal" path (i.e. when we do not need to deserialize the object) by a factor of x3.

Posted by ag...@apache.org.
IGNITE-1972: Optimized "unmarshal" path (i.e. when we do not need to deserialize the object) by a factor of x3.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/c6b2fa56
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/c6b2fa56
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/c6b2fa56

Branch: refs/heads/ignite-1282
Commit: c6b2fa56ff232d92a0f6770ef8b66b871be90905
Parents: 19d2dd0
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Mon Nov 23 16:31:18 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Mon Nov 23 16:31:18 2015 +0300

----------------------------------------------------------------------
 .../internal/portable/BinaryObjectImpl.java     |   32 +-
 .../portable/BinaryObjectOffheapImpl.java       |   27 +-
 .../internal/portable/BinaryReaderExImpl.java   | 1015 ++----------------
 .../portable/BinaryReaderHandlesHolder.java     |   46 +
 .../portable/BinaryReaderHandlesHolderImpl.java |   44 +
 .../portable/GridPortableMarshaller.java        |   23 +-
 .../portable/PortableClassDescriptor.java       |    8 +-
 .../ignite/internal/portable/PortableUtils.java |  861 ++++++++++++++-
 .../portable/builder/PortableBuilderReader.java |   21 +-
 .../platform/PlatformContextImpl.java           |    4 +-
 10 files changed, 1099 insertions(+), 982 deletions(-)
----------------------------------------------------------------------
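
As the commit message notes, the "unmarshal" path is taken when the object does not need to be deserialized: a field or a binary object handle is read straight out of the serialized form, while deserialization is the heavier path that instantiates the user class. From the public API the two paths look like this (a sketch; the Person type and the cache name are made up):

    IgniteCache<Integer, BinaryObject> cache = ignite.cache("people").withKeepBinary();

    BinaryObject bo = cache.get(1);

    // "Unmarshal" path: a single field is read from the binary layout, no Person is built.
    String name = bo.field("name");

    // Full deserialization: a Person instance is materialized from the binary data.
    Person p = bo.deserialize();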


http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectImpl.java
index 65272b0..7db4b4a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectImpl.java
@@ -18,6 +18,9 @@
 package org.apache.ignite.internal.portable;
 
 import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.binary.BinaryObject;
+import org.apache.ignite.binary.BinaryObjectException;
+import org.apache.ignite.binary.BinaryType;
 import org.apache.ignite.internal.GridDirectTransient;
 import org.apache.ignite.internal.IgniteCodeGeneratingFail;
 import org.apache.ignite.internal.portable.streams.PortableHeapInputStream;
@@ -26,12 +29,8 @@ import org.apache.ignite.internal.processors.cache.CacheObjectContext;
 import org.apache.ignite.internal.processors.cache.KeyCacheObject;
 import org.apache.ignite.internal.processors.cache.portable.CacheObjectBinaryProcessorImpl;
 import org.apache.ignite.internal.util.typedef.internal.U;
-import org.apache.ignite.plugin.extensions.communication.Message;
 import org.apache.ignite.plugin.extensions.communication.MessageReader;
 import org.apache.ignite.plugin.extensions.communication.MessageWriter;
-import org.apache.ignite.binary.BinaryObjectException;
-import org.apache.ignite.binary.BinaryType;
-import org.apache.ignite.binary.BinaryObject;
 import org.jetbrains.annotations.Nullable;
 
 import java.io.Externalizable;
@@ -254,13 +253,13 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override public <F> F field(String fieldName) throws BinaryObjectException {
-        return (F)newReader().unmarshalField(fieldName);
+        return (F) reader(null).unmarshalField(fieldName);
     }
 
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override public <F> F field(int fieldId) throws BinaryObjectException {
-        return (F)newReader().unmarshalField(fieldId);
+        return (F) reader(null).unmarshalField(fieldId);
     }
 
     /** {@inheritDoc} */
@@ -394,10 +393,7 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
                 break;
 
             default:
-                BinaryReaderExImpl reader = new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, fieldPos),
-                    null, new BinaryReaderHandles());
-
-                val = reader.unmarshal();
+                val = PortableUtils.unmarshal(PortableHeapInputStream.create(arr, fieldPos), ctx, null);
 
                 break;
         }
@@ -408,14 +404,12 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override protected <F> F field(BinaryReaderHandles rCtx, String fieldName) {
-        BinaryReaderExImpl reader = new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, start), null, rCtx);
-
-        return (F)reader.unmarshalField(fieldName);
+        return (F)reader(rCtx).unmarshalField(fieldName);
     }
 
     /** {@inheritDoc} */
     @Override public boolean hasField(String fieldName) {
-        return newReader().findFieldByName(fieldName);
+        return reader(null).findFieldByName(fieldName);
     }
 
     /** {@inheritDoc} */
@@ -427,7 +421,6 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
             obj0 = deserializeValue(null);
 
         return (T)obj0;
-
     }
 
     /** {@inheritDoc} */
@@ -447,7 +440,7 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
 
     /** {@inheritDoc} */
     @Override protected PortableSchema createSchema() {
-        return newReader().getOrCreateSchema();
+        return reader(null).getOrCreateSchema();
     }
 
     /** {@inheritDoc} */
@@ -556,7 +549,7 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
      */
     private Object deserializeValue(@Nullable CacheObjectContext coCtx) {
         // TODO: IGNITE-1272 - Deserialize with proper class loader.
-        BinaryReaderExImpl reader = newReader();
+        BinaryReaderExImpl reader = reader(null);
 
         Object obj0 = reader.deserialize();
 
@@ -573,9 +566,10 @@ public final class BinaryObjectImpl extends BinaryObjectEx implements Externaliz
     /**
      * Create new reader for this object.
      *
+     * @param rCtx Reader context.
      * @return Reader.
      */
-    private BinaryReaderExImpl newReader() {
-        return new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, start), null, new BinaryReaderHandles());
+    private BinaryReaderExImpl reader(@Nullable BinaryReaderHandles rCtx) {
+        return new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, start), null, rCtx);
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectOffheapImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectOffheapImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectOffheapImpl.java
index 1db3470..8b8e0e8 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectOffheapImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryObjectOffheapImpl.java
@@ -129,7 +129,7 @@ public class BinaryObjectOffheapImpl extends BinaryObjectEx implements Externali
 
     /** {@inheritDoc} */
     @Override protected PortableSchema createSchema() {
-        return newReader().getOrCreateSchema();
+        return reader(null).getOrCreateSchema();
     }
 
     /** {@inheritDoc} */
@@ -163,13 +163,13 @@ public class BinaryObjectOffheapImpl extends BinaryObjectEx implements Externali
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override public <F> F field(String fieldName) throws BinaryObjectException {
-        return (F)newReader().unmarshalField(fieldName);
+        return (F)reader(null).unmarshalField(fieldName);
     }
 
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override public <F> F field(int fieldId) throws BinaryObjectException {
-        return (F)newReader().unmarshalField(fieldId);
+        return (F)reader(null).unmarshalField(fieldId);
     }
 
     /** {@inheritDoc} */
@@ -308,9 +308,7 @@ public class BinaryObjectOffheapImpl extends BinaryObjectEx implements Externali
 
                 stream.position(fieldPos);
 
-                BinaryReaderExImpl reader = new BinaryReaderExImpl(ctx, stream, null, new BinaryReaderHandles());
-
-                val = reader.unmarshal();
+                val = PortableUtils.unmarshal(stream, ctx, null);
 
                 break;
         }
@@ -321,18 +319,12 @@ public class BinaryObjectOffheapImpl extends BinaryObjectEx implements Externali
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override protected <F> F field(BinaryReaderHandles rCtx, String fieldName) {
-        PortableOffheapInputStream stream = new PortableOffheapInputStream(ptr, size, false);
-
-        stream.position(start);
-
-        BinaryReaderExImpl reader = new BinaryReaderExImpl(ctx, stream, null, rCtx);
-
-        return (F)reader.unmarshalField(fieldName);
+        return (F)reader(rCtx).unmarshalField(fieldName);
     }
 
     /** {@inheritDoc} */
     @Override public boolean hasField(String fieldName) {
-        return newReader().findFieldByName(fieldName);
+        return reader(null).findFieldByName(fieldName);
     }
 
     /** {@inheritDoc} */
@@ -418,19 +410,20 @@ public class BinaryObjectOffheapImpl extends BinaryObjectEx implements Externali
      */
     private Object deserializeValue() {
         // TODO: IGNITE-1272 - Deserialize with proper class loader.
-        return newReader().deserialize();
+        return reader(null).deserialize();
     }
 
     /**
      * Create new reader for this object.
      *
+     * @param rCtx Reader context.
      * @return Reader.
      */
-    private BinaryReaderExImpl newReader() {
+    private BinaryReaderExImpl reader(@Nullable BinaryReaderHandles rCtx) {
         PortableOffheapInputStream stream = new PortableOffheapInputStream(ptr, size, false);
 
         stream.position(start);
 
-        return new BinaryReaderExImpl(ctx, stream, null, new BinaryReaderHandles());
+        return new BinaryReaderExImpl(ctx, stream, null, rCtx);
     }
 }
\ No newline at end of file
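
The off-heap counterpart mirrors the same refactoring; the only structural difference is that reader(...) wraps a PortableOffheapInputStream around the raw pointer and positions it at the object's start. The public BinaryObject surface is unchanged, so consumption still looks roughly like the sketch below (the variables and the field name are made up; the constructor argument order (ctx, pointer, start, length) is taken from the calls later in this commit):

    // Sketch only -- illustrative values, not part of the commit.
    BinaryObjectOffheapImpl po = new BinaryObjectOffheapImpl(ctx, ptr, start, len);

    if (po.hasField("value")) {          // internally: reader(null).findFieldByName("value")
        Object v = po.field("value");    // internally: reader(null).unmarshalField("value")

        System.out.println(v);
    }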

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderExImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderExImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderExImpl.java
index a2e18b4..4809c3c 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderExImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderExImpl.java
@@ -17,7 +17,6 @@
 
 package org.apache.ignite.internal.portable;
 
-import org.apache.ignite.IgniteCheckedException;
 import org.apache.ignite.binary.BinaryIdMapper;
 import org.apache.ignite.binary.BinaryInvalidTypeException;
 import org.apache.ignite.binary.BinaryObject;
@@ -25,36 +24,20 @@ import org.apache.ignite.binary.BinaryObjectException;
 import org.apache.ignite.binary.BinaryRawReader;
 import org.apache.ignite.binary.BinaryReader;
 import org.apache.ignite.internal.portable.streams.PortableInputStream;
-import org.apache.ignite.internal.util.lang.GridMapEntry;
 import org.apache.ignite.internal.util.typedef.internal.SB;
-import org.apache.ignite.internal.util.typedef.internal.U;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
-import java.io.ByteArrayInputStream;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.ObjectInput;
-import java.lang.reflect.Array;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
 import java.math.BigDecimal;
-import java.math.BigInteger;
 import java.sql.Timestamp;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
-import java.util.LinkedList;
 import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
-import java.util.TreeSet;
 import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentSkipListSet;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.ARR_LIST;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BOOLEAN;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BOOLEAN_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BYTE;
@@ -63,8 +46,6 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.CHAR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.CHAR_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.CLASS;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.COL;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.CONC_HASH_MAP;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.CONC_SKIP_LIST_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DATE;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DATE_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DECIMAL;
@@ -77,35 +58,24 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.ENUM_AR
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.FLOAT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.FLOAT_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.HANDLE;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.HASH_MAP;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.HASH_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.INT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.INT_ARR;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_HASH_MAP;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_HASH_SET;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_LIST;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.LONG;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.LONG_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.MAP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.MAP_ENTRY;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.NULL;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJ;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJECT_TYPE_ID;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJ_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.OPTM_MARSH;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.PORTABLE_OBJ;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.PROPERTIES_MAP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.SHORT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.SHORT_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.STRING;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.STRING_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.TIMESTAMP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.TIMESTAMP_ARR;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.TREE_MAP;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.TREE_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.UNREGISTERED_TYPE_ID;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.USER_COL;
-import static org.apache.ignite.internal.portable.GridPortableMarshaller.USER_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.UUID;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.UUID_ARR;
 
@@ -113,7 +83,7 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.UUID_AR
  * Portable reader implementation.
  */
 @SuppressWarnings("unchecked")
-public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, ObjectInput {
+public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, BinaryReaderHandlesHolder, ObjectInput {
     /** Portable context. */
     private final PortableContext ctx;
 
@@ -124,7 +94,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     private final ClassLoader ldr;
 
     /** Reader context which is constantly passed between objects. */
-    private final BinaryReaderHandles rCtx;
+    private BinaryReaderHandles hnds;
 
     /** */
     private final int start;
@@ -177,14 +147,26 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
      * @param ctx Context.
      * @param in Input stream.
      * @param ldr Class loader.
-     * @param rCtx Context.
      */
-    public BinaryReaderExImpl(PortableContext ctx, PortableInputStream in, ClassLoader ldr, BinaryReaderHandles rCtx) {
+    public BinaryReaderExImpl(PortableContext ctx, PortableInputStream in, ClassLoader ldr) {
+        this(ctx, in, ldr, null);
+    }
+
+    /**
+     * Constructor.
+     *
+     * @param ctx Context.
+     * @param in Input stream.
+     * @param ldr Class loader.
+     * @param hnds Reader handles.
+     */
+    public BinaryReaderExImpl(PortableContext ctx, PortableInputStream in, ClassLoader ldr,
+        @Nullable BinaryReaderHandles hnds) {
         // Initialize base members.
         this.ctx = ctx;
         this.in = in;
         this.ldr = ldr;
-        this.rCtx = rCtx;
+        this.hnds = hnds;
 
         start = in.position();
 
@@ -240,7 +222,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
                 int off = in.position();
 
                 // Registers class by type ID, at least locally if the cache is not ready yet.
-                typeId = ctx.descriptorForClass(doReadClass(typeId0)).typeId();
+                typeId = ctx.descriptorForClass(PortableUtils.doReadClass(in, ctx, ldr, typeId0)).typeId();
 
                 int clsNameLen = in.position() - off;
 
@@ -256,9 +238,9 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
             schema = PortableUtils.hasSchema(flags) ? getOrCreateSchema() : null;
         }
         else {
+            dataStart = 0;
             typeId = 0;
             rawOff = 0;
-            dataStart = 0;
             footerStart = 0;
             footerLen = 0;
             idMapper = null;
@@ -273,10 +255,10 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     }
 
     /**
-     * @return Handles.
+     * @return Input stream.
      */
-    public BinaryReaderHandles handles() {
-        return rCtx;
+    public PortableInputStream in() {
+        return in;
     }
 
     /**
@@ -287,14 +269,6 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     }
 
     /**
-     * @return Unmarshalled value.
-     * @throws BinaryObjectException In case of error.
-     */
-    @Nullable Object unmarshal() throws BinaryObjectException {
-        return unmarshal(false);
-    }
-
-    /**
      * @param offset Offset in the array.
      * @return Unmarshalled value.
      * @throws BinaryObjectException In case of error.
@@ -302,7 +276,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     public Object unmarshal(int offset) throws BinaryObjectException {
         streamPosition(offset);
 
-        return in.position() >= 0 ? unmarshal() : null;
+        return in.position() >= 0 ? PortableUtils.unmarshal(in, ctx, ldr, this) : null;
     }
 
     /**
@@ -311,7 +285,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
      * @throws BinaryObjectException In case of error.
      */
     @Nullable Object unmarshalField(String fieldName) throws BinaryObjectException {
-        return findFieldByName(fieldName) ? unmarshal() : null;
+        return findFieldByName(fieldName) ? PortableUtils.unmarshal(in, ctx, ldr, this) : null;
     }
 
     /**
@@ -320,7 +294,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
      * @throws BinaryObjectException In case of error.
      */
     @Nullable Object unmarshalField(int fieldId) throws BinaryObjectException {
-        return findFieldById(fieldId) ? unmarshal() : null;
+        return findFieldById(fieldId) ? PortableUtils.unmarshal(in, ctx, ldr, this) : null;
     }
 
     /**
@@ -333,7 +307,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
             Flag flag = checkFlag(MAP_ENTRY);
 
             if (flag == Flag.NORMAL)
-                return doReadMapEntry(true);
+                return PortableUtils.doReadMapEntry(in, ctx, ldr, this, true);
             else if (flag == Flag.HANDLE)
                 return readHandleField();
         }
@@ -351,7 +325,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
             if (checkFlag(PORTABLE_OBJ) == Flag.NULL)
                 return null;
 
-            return new BinaryObjectImpl(ctx, doReadByteArray(), in.readInt());
+            return new BinaryObjectImpl(ctx, PortableUtils.doReadByteArray(in), in.readInt());
         }
         else
             return null;
@@ -367,7 +341,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
             if (checkFlag(CLASS) == Flag.NULL)
                 return null;
 
-            return doReadClass();
+            return PortableUtils.doReadClass(in, ctx, ldr);
         }
 
         return null;
@@ -376,16 +350,26 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     /**
      * @param obj Object.
      */
-    void setHandler(Object obj) {
-        setHandler(obj, start);
+    void setHandle(Object obj) {
+        setHandle(obj, start);
     }
 
-    /**
-     * @param obj Object.
-     * @param pos Position.
-     */
-    void setHandler(Object obj, int pos) {
-        rCtx.put(pos, obj);
+    /** {@inheritDoc} */
+    @Override public void setHandle(Object obj, int pos) {
+        handles().put(pos, obj);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Object getHandle(int pos) {
+        return hnds != null ? hnds.get(pos) : null;
+    }
+
+    /** {@inheritDoc} */
+    @Override public BinaryReaderHandles handles() {
+        if (hnds == null)
+            hnds = new BinaryReaderHandles();
+
+        return hnds;
     }
 
     /**
@@ -395,16 +379,16 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
      * @return Field.
      */
     private <T> T readHandleField() {
-        int handlePos = positionForHandle() - in.readInt();
+        int handlePos = PortableUtils.positionForHandle(in) - in.readInt();
 
-        Object obj = rCtx.get(handlePos);
+        Object obj = getHandle(handlePos);
 
         if (obj == null) {
             int retPos = in.position();
 
             streamPosition(handlePos);
 
-            obj = doReadObject();
+            obj = PortableUtils.doReadObject(in, ctx, ldr, this);
 
             streamPosition(retPos);
         }
@@ -457,7 +441,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public byte[] readByteArray() throws BinaryObjectException {
         switch (checkFlag(BYTE_ARR)) {
             case NORMAL:
-                return doReadByteArray();
+                return PortableUtils.doReadByteArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -513,7 +497,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public boolean[] readBooleanArray() throws BinaryObjectException {
         switch (checkFlag(BOOLEAN_ARR)) {
             case NORMAL:
-                return doReadBooleanArray();
+                return PortableUtils.doReadBooleanArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -569,7 +553,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public short[] readShortArray() throws BinaryObjectException {
         switch (checkFlag(SHORT_ARR)) {
             case NORMAL:
-                return doReadShortArray();
+                return PortableUtils.doReadShortArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -625,7 +609,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public char[] readCharArray() throws BinaryObjectException {
         switch (checkFlag(CHAR_ARR)) {
             case NORMAL:
-                return doReadCharArray();
+                return PortableUtils.doReadCharArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -681,7 +665,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public int[] readIntArray() throws BinaryObjectException {
         switch (checkFlag(INT_ARR)) {
             case NORMAL:
-                return doReadIntArray();
+                return PortableUtils.doReadIntArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -737,7 +721,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public long[] readLongArray() throws BinaryObjectException {
         switch (checkFlag(LONG_ARR)) {
             case NORMAL:
-                return doReadLongArray();
+                return PortableUtils.doReadLongArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -793,7 +777,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public float[] readFloatArray() throws BinaryObjectException {
         switch (checkFlag(FLOAT_ARR)) {
             case NORMAL:
-                return doReadFloatArray();
+                return PortableUtils.doReadFloatArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -849,7 +833,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public double[] readDoubleArray() throws BinaryObjectException {
         switch (checkFlag(DOUBLE_ARR)) {
             case NORMAL:
-                return doReadDoubleArray();
+                return PortableUtils.doReadDoubleArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -875,7 +859,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
 
     /** {@inheritDoc} */
     @Override @Nullable public BigDecimal readDecimal() throws BinaryObjectException {
-        return checkFlagNoHandles(DECIMAL) == Flag.NORMAL ? doReadDecimal() : null;
+        return checkFlagNoHandles(DECIMAL) == Flag.NORMAL ? PortableUtils.doReadDecimal(in) : null;
     }
 
     /** {@inheritDoc} */
@@ -896,7 +880,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Override @Nullable public BigDecimal[] readDecimalArray() throws BinaryObjectException {
         switch (checkFlag(DECIMAL_ARR)) {
             case NORMAL:
-                return doReadDecimalArray();
+                return PortableUtils.doReadDecimalArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -922,7 +906,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
 
     /** {@inheritDoc} */
     @Override @Nullable public String readString() throws BinaryObjectException {
-        return checkFlagNoHandles(STRING) == Flag.NORMAL ? doReadString() : null;
+        return checkFlagNoHandles(STRING) == Flag.NORMAL ? PortableUtils.doReadString(in) : null;
     }
 
     /** {@inheritDoc} */
@@ -943,7 +927,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Override @Nullable public String[] readStringArray() throws BinaryObjectException {
         switch (checkFlag(STRING_ARR)) {
             case NORMAL:
-                return doReadStringArray();
+                return PortableUtils.doReadStringArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -969,7 +953,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
 
     /** {@inheritDoc} */
     @Override @Nullable public UUID readUuid() throws BinaryObjectException {
-        return checkFlagNoHandles(UUID) == Flag.NORMAL ? doReadUuid() : null;
+        return checkFlagNoHandles(UUID) == Flag.NORMAL ? PortableUtils.doReadUuid(in) : null;
     }
 
     /** {@inheritDoc} */
@@ -990,7 +974,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Override @Nullable public UUID[] readUuidArray() throws BinaryObjectException {
         switch (checkFlag(UUID_ARR)) {
             case NORMAL:
-                return doReadUuidArray();
+                return PortableUtils.doReadUuidArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -1016,7 +1000,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
 
     /** {@inheritDoc} */
     @Override @Nullable public Date readDate() throws BinaryObjectException {
-        return checkFlagNoHandles(DATE) == Flag.NORMAL ? doReadDate() : null;
+        return checkFlagNoHandles(DATE) == Flag.NORMAL ? PortableUtils.doReadDate(in) : null;
     }
 
     /** {@inheritDoc} */
@@ -1037,7 +1021,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Override @Nullable public Date[] readDateArray() throws BinaryObjectException {
         switch (checkFlag(DATE_ARR)) {
             case NORMAL:
-                return doReadDateArray();
+                return PortableUtils.doReadDateArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -1063,7 +1047,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
 
     /** {@inheritDoc} */
     @Override @Nullable public Timestamp readTimestamp() throws BinaryObjectException {
-        return checkFlagNoHandles(TIMESTAMP) == Flag.NORMAL ? doReadTimestamp() : null;
+        return checkFlagNoHandles(TIMESTAMP) == Flag.NORMAL ? PortableUtils.doReadTimestamp(in) : null;
     }
 
     /** {@inheritDoc} */
@@ -1084,7 +1068,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Override @Nullable public Timestamp[] readTimestampArray() throws BinaryObjectException {
         switch (checkFlag(TIMESTAMP_ARR)) {
             case NORMAL:
-                return doReadTimestampArray();
+                return PortableUtils.doReadTimestampArray(in);
 
             case HANDLE:
                 return readHandleField();
@@ -1097,7 +1081,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     /** {@inheritDoc} */
     @SuppressWarnings("unchecked")
     @Nullable @Override public <T> T readObject(String fieldName) throws BinaryObjectException {
-        return findFieldByName(fieldName) ? (T)doReadObject() : null;
+        return findFieldByName(fieldName) ? (T)PortableUtils.doReadObject(in, ctx, ldr, this) : null;
     }
 
     /**
@@ -1106,17 +1090,17 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
      * @throws BinaryObjectException In case of error.
      */
     @Nullable Object readObject(int fieldId) throws BinaryObjectException {
-        return findFieldById(fieldId) ? doReadObject() : null;
+        return findFieldById(fieldId) ? PortableUtils.doReadObject(in, ctx, ldr, this) : null;
     }
 
     /** {@inheritDoc} */
     @Override public Object readObject() throws BinaryObjectException {
-        return doReadObject();
+        return PortableUtils.doReadObject(in, ctx, ldr, this);
     }
 
     /** {@inheritDoc} */
     @Nullable @Override public Object readObjectDetached() throws BinaryObjectException {
-        return unmarshal(true);
+        return PortableUtils.unmarshal(in, ctx, ldr, this, true);
     }
 
     /** {@inheritDoc} */
@@ -1137,7 +1121,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     @Nullable @Override public Object[] readObjectArray() throws BinaryObjectException {
         switch (checkFlag(OBJ_ARR)) {
             case NORMAL:
-                return doReadObjectArray(true);
+                return PortableUtils.doReadObjectArray(in, ctx, ldr, this, true);
 
             case HANDLE:
                 return readHandleField();
@@ -1177,12 +1161,12 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     private Enum<?> readEnum0(@Nullable Class<?> cls) throws BinaryObjectException {
         if (checkFlagNoHandles(ENUM) == Flag.NORMAL) {
             // Read class even if we know it in advance to set correct stream position.
-            Class<?> cls0 = doReadClass();
+            Class<?> cls0 = PortableUtils.doReadClass(in, ctx, ldr);
 
             if (cls == null)
                 cls = cls0;
 
-            return doReadEnum(cls);
+            return PortableUtils.doReadEnum(in, cls);
         }
         else
             return null;
@@ -1220,12 +1204,12 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
         switch (checkFlag(ENUM_ARR)) {
             case NORMAL:
                 // Read class even if we know it in advance to set correct stream position.
-                Class<?> cls0 = doReadClass();
+                Class<?> cls0 = PortableUtils.doReadClass(in, ctx, ldr);
 
                 if (cls == null)
                     cls = cls0;
 
-                return doReadEnumArray(cls);
+                return PortableUtils.doReadEnumArray(in, ctx, ldr, cls);
 
             case HANDLE:
                 return readHandleField();
@@ -1279,7 +1263,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
         throws BinaryObjectException {
         switch (checkFlag(COL)) {
             case NORMAL:
-                return (Collection)doReadCollection(true, cls);
+                return (Collection)PortableUtils.doReadCollection(in, ctx, ldr, this, true, cls);
 
             case HANDLE:
                 return readHandleField();
@@ -1331,7 +1315,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     private Map readMap0(@Nullable Class<? extends Map> cls) throws BinaryObjectException {
         switch (checkFlag(MAP)) {
             case NORMAL:
-                return (Map)doReadMap(true, cls);
+                return (Map)PortableUtils.doReadMap(in, ctx, ldr, this, true, cls);
 
             case HANDLE:
                 return readHandleField();
@@ -1358,7 +1342,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
         else if (flag == HANDLE)
             return Flag.HANDLE;
 
-        int pos = positionForHandle();
+        int pos = PortableUtils.positionForHandle(in);
 
         throw new BinaryObjectException("Unexpected flag value [pos=" + pos + ", expected=" + expFlag +
             ", actual=" + flag + ']');
@@ -1379,7 +1363,7 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
         else if (flag == NULL)
             return Flag.NULL;
 
-        int pos = positionForHandle();
+        int pos = PortableUtils.positionForHandle(in);
 
         throw new BinaryObjectException("Unexpected flag value [pos=" + pos + ", expected=" + expFlag +
             ", actual=" + flag + ']');
@@ -1399,252 +1383,6 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     }
 
     /**
-     * @return Unmarshalled value.
-     * @throws BinaryObjectException In case of error.
-     */
-    @Nullable private Object unmarshal(boolean detach) throws BinaryObjectException {
-        int start = in.position();
-
-        byte flag = in.readByte();
-
-        switch (flag) {
-            case NULL:
-                return null;
-
-            case HANDLE: {
-                int handlePos = start - in.readInt();
-
-                Object obj = rCtx.get(handlePos);
-
-                if (obj == null) {
-                    int retPos = in.position();
-
-                    streamPosition(handlePos);
-
-                    obj = unmarshal();
-
-                    streamPosition(retPos);
-                }
-
-                return obj;
-            }
-
-            case OBJ: {
-                PortableUtils.checkProtocolVersion(in.readByte());
-
-                int len = PortableUtils.length(in, start);
-
-                BinaryObjectEx po;
-
-                if (detach) {
-                    // In detach mode we simply copy object's content.
-                    streamPosition(start);
-
-                    po = new BinaryObjectImpl(ctx, in.readByteArray(len), 0);
-                }
-                else {
-                    if (in.offheapPointer() == 0)
-                        po = new BinaryObjectImpl(ctx, in.array(), start);
-                    else
-                        po = new BinaryObjectOffheapImpl(ctx, in.offheapPointer(), start,
-                            in.remaining() + in.position());
-
-                    streamPosition(start + po.length());
-                }
-
-                rCtx.put(start, po);
-
-                return po;
-            }
-
-            case BYTE:
-                return in.readByte();
-
-            case SHORT:
-                return in.readShort();
-
-            case INT:
-                return in.readInt();
-
-            case LONG:
-                return in.readLong();
-
-            case FLOAT:
-                return in.readFloat();
-
-            case DOUBLE:
-                return in.readDouble();
-
-            case CHAR:
-                return in.readChar();
-
-            case BOOLEAN:
-                return in.readBoolean();
-
-            case DECIMAL:
-                return doReadDecimal();
-
-            case STRING:
-                return doReadString();
-
-            case UUID:
-                return doReadUuid();
-
-            case DATE:
-                return doReadDate();
-
-            case TIMESTAMP:
-                return doReadTimestamp();
-
-            case BYTE_ARR:
-                return doReadByteArray();
-
-            case SHORT_ARR:
-                return doReadShortArray();
-
-            case INT_ARR:
-                return doReadIntArray();
-
-            case LONG_ARR:
-                return doReadLongArray();
-
-            case FLOAT_ARR:
-                return doReadFloatArray();
-
-            case DOUBLE_ARR:
-                return doReadDoubleArray();
-
-            case CHAR_ARR:
-                return doReadCharArray();
-
-            case BOOLEAN_ARR:
-                return doReadBooleanArray();
-
-            case DECIMAL_ARR:
-                return doReadDecimalArray();
-
-            case STRING_ARR:
-                return doReadStringArray();
-
-            case UUID_ARR:
-                return doReadUuidArray();
-
-            case DATE_ARR:
-                return doReadDateArray();
-
-            case TIMESTAMP_ARR:
-                return doReadTimestampArray();
-
-            case OBJ_ARR:
-                return doReadObjectArray(false);
-
-            case COL:
-                return doReadCollection(false, null);
-
-            case MAP:
-                return doReadMap(false, null);
-
-            case MAP_ENTRY:
-                return doReadMapEntry(false);
-
-            case PORTABLE_OBJ:
-                return doReadPortableObject();
-
-            case ENUM:
-                return doReadEnum(doReadClass());
-
-            case ENUM_ARR:
-                return doReadEnumArray(doReadClass());
-
-            case CLASS:
-                return doReadClass();
-
-            case OPTM_MARSH:
-                return doReadOptimized();
-
-            default:
-                throw new BinaryObjectException("Invalid flag value: " + flag);
-        }
-    }
-
-    /**
-     * @return Value.
-     */
-    private BigDecimal doReadDecimal() {
-        int scale = in.readInt();
-        byte[] mag = doReadByteArray();
-
-        BigInteger intVal = new BigInteger(mag);
-
-        if (scale < 0) {
-            scale &= 0x7FFFFFFF;
-
-            intVal = intVal.negate();
-        }
-
-        return new BigDecimal(intVal, scale);
-    }
-
-    /**
-     * @return Value.
-     */
-    private String doReadString() {
-        if (!in.hasArray())
-            return new String(doReadByteArray(), UTF_8);
-
-        int strLen = in.readInt();
-
-        int pos = in.position();
-
-        // String will copy necessary array part for us.
-        String res = new String(in.array(), pos, strLen, UTF_8);
-
-        streamPosition(pos + strLen);
-
-        return res;
-    }
-
-    /**
-     * @return Value.
-     */
-    private UUID doReadUuid() {
-        return new UUID(in.readLong(), in.readLong());
-    }
-
-    /**
-     * @return Value.
-     */
-    private Date doReadDate() {
-        long time = in.readLong();
-
-        return new Date(time);
-    }
-
-    /**
-     * @return Value.
-     */
-    private Timestamp doReadTimestamp() {
-        long time = in.readLong();
-        int nanos = in.readInt();
-
-        Timestamp ts = new Timestamp(time);
-
-        ts.setNanos(ts.getNanos() + nanos);
-
-        return ts;
-    }
-
-    /**
-     * @return Object.
-     * @throws BinaryObjectException In case of error.
-     */
-    @Nullable private Object doReadObject() throws BinaryObjectException {
-        BinaryReaderExImpl reader = new BinaryReaderExImpl(ctx, in, ldr, rCtx);
-
-        return reader.deserialize();
-    }
-
-    /**
      * @return Deserialized object.
      * @throws BinaryObjectException If failed.
      */
@@ -1662,14 +1400,14 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
             case HANDLE:
                 int handlePos = start - in.readInt();
 
-                obj = rCtx.get(handlePos);
+                obj = getHandle(handlePos);
 
                 if (obj == null) {
                     int retPos = in.position();
 
                     streamPosition(handlePos);
 
-                    obj = doReadObject();
+                    obj = PortableUtils.doReadObject(in, ctx, ldr, this);
 
                     streamPosition(retPos);
                 }
@@ -1731,117 +1469,117 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
                 break;
 
             case DECIMAL:
-                obj = doReadDecimal();
+                obj = PortableUtils.doReadDecimal(in);
 
                 break;
 
             case STRING:
-                obj = doReadString();
+                obj = PortableUtils.doReadString(in);
 
                 break;
 
             case UUID:
-                obj = doReadUuid();
+                obj = PortableUtils.doReadUuid(in);
 
                 break;
 
             case DATE:
-                obj = doReadDate();
+                obj = PortableUtils.doReadDate(in);
 
                 break;
 
             case TIMESTAMP:
-                obj = doReadTimestamp();
+                obj = PortableUtils.doReadTimestamp(in);
 
                 break;
 
             case BYTE_ARR:
-                obj = doReadByteArray();
+                obj = PortableUtils.doReadByteArray(in);
 
                 break;
 
             case SHORT_ARR:
-                obj = doReadShortArray();
+                obj = PortableUtils.doReadShortArray(in);
 
                 break;
 
             case INT_ARR:
-                obj = doReadIntArray();
+                obj = PortableUtils.doReadIntArray(in);
 
                 break;
 
             case LONG_ARR:
-                obj = doReadLongArray();
+                obj = PortableUtils.doReadLongArray(in);
 
                 break;
 
             case FLOAT_ARR:
-                obj = doReadFloatArray();
+                obj = PortableUtils.doReadFloatArray(in);
 
                 break;
 
             case DOUBLE_ARR:
-                obj = doReadDoubleArray();
+                obj = PortableUtils.doReadDoubleArray(in);
 
                 break;
 
             case CHAR_ARR:
-                obj = doReadCharArray();
+                obj = PortableUtils.doReadCharArray(in);
 
                 break;
 
             case BOOLEAN_ARR:
-                obj = doReadBooleanArray();
+                obj = PortableUtils.doReadBooleanArray(in);
 
                 break;
 
             case DECIMAL_ARR:
-                obj = doReadDecimalArray();
+                obj = PortableUtils.doReadDecimalArray(in);
 
                 break;
 
             case STRING_ARR:
-                obj = doReadStringArray();
+                obj = PortableUtils.doReadStringArray(in);
 
                 break;
 
             case UUID_ARR:
-                obj = doReadUuidArray();
+                obj = PortableUtils.doReadUuidArray(in);
 
                 break;
 
             case DATE_ARR:
-                obj = doReadDateArray();
+                obj = PortableUtils.doReadDateArray(in);
 
                 break;
 
             case TIMESTAMP_ARR:
-                obj = doReadTimestampArray();
+                obj = PortableUtils.doReadTimestampArray(in);
 
                 break;
 
             case OBJ_ARR:
-                obj = doReadObjectArray(true);
+                obj = PortableUtils.doReadObjectArray(in, ctx, ldr, this, true);
 
                 break;
 
             case COL:
-                obj = doReadCollection(true, null);
+                obj = PortableUtils.doReadCollection(in, ctx, ldr, this, true, null);
 
                 break;
 
             case MAP:
-                obj = doReadMap(true, null);
+                obj = PortableUtils.doReadMap(in, ctx, ldr, this, true, null);
 
                 break;
 
             case MAP_ENTRY:
-                obj = doReadMapEntry(true);
+                obj = PortableUtils.doReadMapEntry(in, ctx, ldr, this, true);
 
                 break;
 
             case PORTABLE_OBJ:
-                obj = doReadPortableObject();
+                obj = PortableUtils.doReadPortableObject(in, ctx);
 
                 ((BinaryObjectImpl)obj).context(ctx);
 
@@ -1851,22 +1589,22 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
                 break;
 
             case ENUM:
-                obj = doReadEnum(doReadClass());
+                obj = PortableUtils.doReadEnum(in, PortableUtils.doReadClass(in, ctx, ldr));
 
                 break;
 
             case ENUM_ARR:
-                obj = doReadEnumArray(doReadClass());
+                obj = PortableUtils.doReadEnumArray(in, ctx, ldr, PortableUtils.doReadClass(in, ctx, ldr));
 
                 break;
 
             case CLASS:
-                obj = doReadClass();
+                obj = PortableUtils.doReadClass(in, ctx, ldr);
 
                 break;
 
             case OPTM_MARSH:
-                obj = doReadOptimized();
+                obj = PortableUtils.doReadOptimized(in, ctx);
 
                 break;
 
@@ -1878,541 +1616,6 @@ public class BinaryReaderExImpl implements BinaryReader, BinaryRawReaderEx, Obje
     }
 
     /**
-     * Read object serialized using optimized marshaller.
-     *
-     * @return Result.
-     */
-    private Object doReadOptimized() {
-        int len = in.readInt();
-
-        ByteArrayInputStream input = new ByteArrayInputStream(in.array(), in.position(), len);
-
-        try {
-            return ctx.optimizedMarsh().unmarshal(input, null);
-        }
-        catch (IgniteCheckedException e) {
-            throw new BinaryObjectException("Failed to unmarshal object with optimized marshaller", e);
-        }
-        finally {
-            streamPosition(in.position() + len);
-        }
-    }
-
-    /**
-     * @return Value.
-     */
-    private byte[] doReadByteArray() {
-        int len = in.readInt();
-
-        return in.readByteArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private short[] doReadShortArray() {
-        int len = in.readInt();
-
-        return in.readShortArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private int[] doReadIntArray() {
-        int len = in.readInt();
-
-        return in.readIntArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private long[] doReadLongArray() {
-        int len = in.readInt();
-
-        return in.readLongArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private float[] doReadFloatArray() {
-        int len = in.readInt();
-
-        return in.readFloatArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private double[] doReadDoubleArray() {
-        int len = in.readInt();
-
-        return in.readDoubleArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private char[] doReadCharArray() {
-        int len = in.readInt();
-
-        return in.readCharArray(len);
-    }
-
-    /**
-     * @return Value.
-     */
-    private boolean[] doReadBooleanArray() {
-        int len = in.readInt();
-
-        return in.readBooleanArray(len);
-    }
-
-    /**
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private BigDecimal[] doReadDecimalArray() throws BinaryObjectException {
-        int len = in.readInt();
-
-        BigDecimal[] arr = new BigDecimal[len];
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else {
-                if (flag != DECIMAL)
-                    throw new BinaryObjectException("Invalid flag value: " + flag);
-
-                arr[i] = doReadDecimal();
-            }
-        }
-
-        return arr;
-    }
-
-    /**
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private String[] doReadStringArray() throws BinaryObjectException {
-        int len = in.readInt();
-
-        String[] arr = new String[len];
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else {
-                if (flag != STRING)
-                    throw new BinaryObjectException("Invalid flag value: " + flag);
-
-                arr[i] = doReadString();
-            }
-        }
-
-        return arr;
-    }
-    
-    /**
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private UUID[] doReadUuidArray() throws BinaryObjectException {
-        int len = in.readInt();
-
-        UUID[] arr = new UUID[len];
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else {
-                if (flag != UUID)
-                    throw new BinaryObjectException("Invalid flag value: " + flag);
-
-                arr[i] = doReadUuid();
-            }
-        }
-
-        return arr;
-    }
-
-    /**
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private Date[] doReadDateArray() throws BinaryObjectException {
-        int len = in.readInt();
-
-        Date[] arr = new Date[len];
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else {
-                if (flag != DATE)
-                    throw new BinaryObjectException("Invalid flag value: " + flag);
-
-                arr[i] = doReadDate();
-            }
-        }
-
-        return arr;
-    }
-
-    /**
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private Timestamp[] doReadTimestampArray() throws BinaryObjectException {
-        int len = in.readInt();
-
-        Timestamp[] arr = new Timestamp[len];
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else {
-                if (flag != TIMESTAMP)
-                    throw new BinaryObjectException("Invalid flag value: " + flag);
-
-                arr[i] = doReadTimestamp();
-            }
-        }
-
-        return arr;
-    }
-
-    /**
-     * @param deep Deep flag.
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private Object[] doReadObjectArray(boolean deep) throws BinaryObjectException {
-        int hPos = positionForHandle();
-
-        Class compType = doReadClass();
-
-        int len = in.readInt();
-
-        Object[] arr = deep ? (Object[])Array.newInstance(compType, len) : new Object[len];
-
-        setHandler(arr, hPos);
-
-        for (int i = 0; i < len; i++)
-            arr[i] = deep ? doReadObject() : unmarshal();
-
-        return arr;
-    }
-
-    /**
-     * @param deep Deep flag.
-     * @param cls Collection class.
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    @SuppressWarnings("unchecked")
-    private Collection<?> doReadCollection(boolean deep, @Nullable Class<? extends Collection> cls)
-        throws BinaryObjectException {
-        int hPos = positionForHandle();
-
-        int size = in.readInt();
-
-        assert size >= 0;
-
-        byte colType = in.readByte();
-
-        Collection<Object> col;
-
-        if (cls != null) {
-            try {
-                Constructor<? extends Collection> cons = cls.getConstructor();
-
-                col = cons.newInstance();
-            }
-            catch (NoSuchMethodException ignored) {
-                throw new BinaryObjectException("Collection class doesn't have public default constructor: " +
-                    cls.getName());
-            }
-            catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
-                throw new BinaryObjectException("Failed to instantiate collection: " + cls.getName(), e);
-            }
-        }
-        else {
-            switch (colType) {
-                case ARR_LIST:
-                    col = new ArrayList<>(size);
-
-                    break;
-
-                case LINKED_LIST:
-                    col = new LinkedList<>();
-
-                    break;
-
-                case HASH_SET:
-                    col = U.newHashSet(size);
-
-                    break;
-
-                case LINKED_HASH_SET:
-                    col = U.newLinkedHashSet(size);
-
-                    break;
-
-                case TREE_SET:
-                    col = new TreeSet<>();
-
-                    break;
-
-                case CONC_SKIP_LIST_SET:
-                    col = new ConcurrentSkipListSet<>();
-
-                    break;
-
-                case USER_SET:
-                    col = U.newHashSet(size);
-
-                    break;
-
-                case USER_COL:
-                    col = new ArrayList<>(size);
-
-                    break;
-
-                default:
-                    throw new BinaryObjectException("Invalid collection type: " + colType);
-            }
-        }
-
-        setHandler(col, hPos);
-
-        for (int i = 0; i < size; i++)
-            col.add(deep ? doReadObject() : unmarshal());
-
-        return col;
-    }
-
-    /**
-     * @param deep Deep flag.
-     * @param cls Map class.
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    @SuppressWarnings("unchecked")
-    private Map<?, ?> doReadMap(boolean deep, @Nullable Class<? extends Map> cls)
-        throws BinaryObjectException {
-        int hPos = positionForHandle();
-
-        int size = in.readInt();
-
-        assert size >= 0;
-
-        byte mapType = in.readByte();
-
-        Map<Object, Object> map;
-
-        if (cls != null) {
-            try {
-                Constructor<? extends Map> cons = cls.getConstructor();
-
-                map = cons.newInstance();
-            }
-            catch (NoSuchMethodException ignored) {
-                throw new BinaryObjectException("Map class doesn't have public default constructor: " +
-                    cls.getName());
-            }
-            catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
-                throw new BinaryObjectException("Failed to instantiate map: " + cls.getName(), e);
-            }
-        }
-        else {
-            switch (mapType) {
-                case HASH_MAP:
-                    map = U.newHashMap(size);
-
-                    break;
-
-                case LINKED_HASH_MAP:
-                    map = U.newLinkedHashMap(size);
-
-                    break;
-
-                case TREE_MAP:
-                    map = new TreeMap<>();
-
-                    break;
-
-                case CONC_HASH_MAP:
-                    map = new ConcurrentHashMap<>(size);
-
-                    break;
-
-                case USER_COL:
-                    map = U.newHashMap(size);
-
-                    break;
-
-                case PROPERTIES_MAP:
-                    map = new Properties();
-
-                    break;
-
-                default:
-                    throw new BinaryObjectException("Invalid map type: " + mapType);
-            }
-        }
-
-        setHandler(map, hPos);
-
-        for (int i = 0; i < size; i++)
-            map.put(deep ? doReadObject() : unmarshal(), deep ? doReadObject() : unmarshal());
-
-        return map;
-    }
-
-    /**
-     * @param deep Deep flag.
-     * @return Value.
-     * @throws BinaryObjectException In case of error.
-     */
-    private Map.Entry<?, ?> doReadMapEntry(boolean deep) throws BinaryObjectException {
-        int hPos = positionForHandle();
-
-        Object val1 = deep ? doReadObject() : unmarshal();
-        Object val2 = deep ? doReadObject() : unmarshal();
-
-        GridMapEntry entry = new GridMapEntry<>(val1, val2);
-
-        setHandler(entry, hPos);
-
-        return entry;
-    }
-
-    /**
-     * @return Value.
-     */
-    private BinaryObject doReadPortableObject() {
-        if (in.offheapPointer() > 0) {
-            int len = in.readInt();
-
-            int pos = in.position();
-
-            streamPosition(in.position() + len);
-
-            int start = in.readInt();
-
-            return new BinaryObjectOffheapImpl(ctx, in.offheapPointer() + pos, start, len);
-        }
-        else {
-            byte[] arr = doReadByteArray();
-            int start = in.readInt();
-
-            return new BinaryObjectImpl(ctx, arr, start);
-        }
-    }
-
-    /**
-     * Having target class in place we simply read ordinal and create final representation.
-     *
-     * @param cls Enum class.
-     * @return Value.
-     */
-    private Enum<?> doReadEnum(Class<?> cls) throws BinaryObjectException {
-        assert cls != null;
-
-        if (!cls.isEnum())
-            throw new BinaryObjectException("Class does not represent enum type: " + cls.getName());
-
-        int ord = in.readInt();
-
-        return BinaryEnumCache.get(cls, ord);
-    }
-
-    /**
-     * @param cls Enum class.
-     * @return Value.
-     */
-    private Object[] doReadEnumArray(Class<?> cls) throws BinaryObjectException {
-        int len = in.readInt();
-
-        Object[] arr = (Object[])Array.newInstance(cls, len);
-
-        for (int i = 0; i < len; i++) {
-            byte flag = in.readByte();
-
-            if (flag == NULL)
-                arr[i] = null;
-            else
-                arr[i] = doReadEnum(doReadClass());
-        }
-
-        return arr;
-    }
-
-    /**
-     * @return Value.
-     */
-    private Class doReadClass() throws BinaryObjectException {
-        return doReadClass(in.readInt());
-    }
-
-    /**
-     * @param typeId Type id.
-     * @return Value.
-     */
-    private Class doReadClass(int typeId) throws BinaryObjectException {
-        Class cls;
-
-        if (typeId == OBJECT_TYPE_ID)
-            return Object.class;
-
-        if (typeId != UNREGISTERED_TYPE_ID)
-            cls = ctx.descriptorForTypeId(true, typeId, ldr).describedClass();
-        else {
-            byte flag = in.readByte();
-
-            if (flag != STRING)
-                throw new BinaryObjectException("No class definition for typeId: " + typeId);
-
-            String clsName = doReadString();
-
-            try {
-                cls = U.forName(clsName, ldr);
-            }
-            catch (ClassNotFoundException e) {
-                throw new BinaryInvalidTypeException("Failed to load the class: " + clsName, e);
-            }
-
-            // forces registering of class by type id, at least locally
-            ctx.descriptorForClass(cls);
-        }
-
-        return cls;
-    }
-
-    /**
-     * Get position to be used for handle. We assume here that the hdr byte was read, hence subtract -1.  
-     *
-     * @return Position for handle.
-     */
-    int positionForHandle() {
-        return in.position() - 1;
-    }
-    
-    /**
      * @param name Field name.
      * @return Field offset.
      */
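
The large block of deletions above is the set of private doRead*() helpers; in this commit they become static methods on PortableUtils that take the input stream (plus, where needed, the portable context, the class loader and a BinaryReaderHandlesHolder) as explicit arguments. That is what lets earlier call sites in this commit, such as BinaryObjectOffheapImpl, decode a value straight from a stream via PortableUtils.unmarshal(stream, ctx, null) without building a reader. A condensed sketch of the new call shapes, restricted to signatures that appear in this diff and written as if inside a BinaryReaderExImpl method ('in', 'ctx', 'ldr' are its fields, 'this' is the handles holder):

    // Sketch only: relocated helpers after this change.
    String   str = PortableUtils.doReadString(in);                  // was: doReadString()
    Class<?> cls = PortableUtils.doReadClass(in, ctx, ldr);         // was: doReadClass()
    Object   obj = PortableUtils.doReadObject(in, ctx, ldr, this);  // was: doReadObject()
    Object   any = PortableUtils.unmarshal(in, ctx, ldr, this);     // was: unmarshal()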

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolder.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolder.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolder.java
new file mode 100644
index 0000000..09a896c
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolder.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.portable;
+
+/**
+ * Holder for handles.
+ */
+public interface BinaryReaderHandlesHolder {
+    /**
+     * Set handle.
+     *
+     * @param obj Object.
+     * @param pos Position.
+     */
+    public void setHandle(Object obj, int pos);
+
+    /**
+     * Get handle.
+     *
+     * @param pos Position.
+     * @return Handle.
+     */
+    public Object getHandle(int pos);
+
+    /**
+     * Get all handles.
+     *
+     * @return Handles.
+     */
+    public BinaryReaderHandles handles();
+}
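
This interface extracts the handle bookkeeping that used to be hard-wired into BinaryReaderExImpl. Handles are keyed by absolute stream position: a container is registered at its start offset before its children are decoded, so a child that turns out to be a HANDLE back-reference to that offset resolves to the instance under construction instead of recursing. A minimal sketch of that contract, assuming nothing beyond the three methods above ('readElement' is a hypothetical stand-in for decoding one child value):

    // Sketch only: registration happens before the children are read.
    static Object[] readArrayWithHandles(BinaryReaderHandlesHolder holder, int startPos, int len,
        java.util.function.Supplier<Object> readElement) {
        Object[] arr = new Object[len];

        holder.setHandle(arr, startPos);    // later HANDLE markers pointing at startPos find 'arr'

        for (int i = 0; i < len; i++)
            arr[i] = readElement.get();     // may itself call holder.getHandle(startPos)

        return arr;
    }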

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolderImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolderImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolderImpl.java
new file mode 100644
index 0000000..1d40136
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryReaderHandlesHolderImpl.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.portable;
+
+/**
+ * Simple holder for handles.
+ */
+public class BinaryReaderHandlesHolderImpl implements BinaryReaderHandlesHolder {
+    /** Handles. */
+    private BinaryReaderHandles hnds;
+
+    /** {@inheritDoc} */
+    @Override public void setHandle(Object obj, int pos) {
+        handles().put(pos, obj);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Object getHandle(int pos) {
+        return hnds != null ? hnds.get(pos) : null;
+    }
+
+    /** {@inheritDoc} */
+    @Override public BinaryReaderHandles handles() {
+        if (hnds == null)
+            hnds = new BinaryReaderHandles();
+
+        return hnds;
+    }
+}
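
Taken together, the interface and its default implementation above let a reader register every object at the stream position of its header, so that a later HANDLE marker (an offset back to that position) resolves to the same instance instead of the data being read twice. A minimal usage sketch, not part of the patch, with a made-up position and a placeholder object:

    // Sketch only: register an object at its start position, then resolve a
    // back-reference that points at the same position.
    BinaryReaderHandlesHolder handles = new BinaryReaderHandlesHolderImpl();

    Object obj = new java.util.ArrayList<>(); // object currently being read (placeholder)
    int objPos = 24;                          // hypothetical position of its header in the stream

    handles.setHandle(obj, objPos);           // register before nested fields are read

    // A later HANDLE marker encoding an offset back to objPos yields the same instance.
    assert handles.getHandle(objPos) == obj;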

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/GridPortableMarshaller.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/GridPortableMarshaller.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/GridPortableMarshaller.java
index 9c61ef2..af1ed68 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/GridPortableMarshaller.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/GridPortableMarshaller.java
@@ -255,10 +255,7 @@ public class GridPortableMarshaller {
     @Nullable public <T> T unmarshal(byte[] bytes, @Nullable ClassLoader clsLdr) throws BinaryObjectException {
         assert bytes != null;
 
-        BinaryReaderExImpl reader =
-            new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(bytes, 0), clsLdr, new BinaryReaderHandles());
-
-        return (T)reader.unmarshal();
+        return (T)PortableUtils.unmarshal(PortableHeapInputStream.create(bytes, 0), ctx, clsLdr);
     }
 
     /**
@@ -268,7 +265,7 @@ public class GridPortableMarshaller {
      */
     @SuppressWarnings("unchecked")
     @Nullable public <T> T unmarshal(PortableInputStream in) throws BinaryObjectException {
-        return (T)reader(in).unmarshal();
+        return (T)PortableUtils.unmarshal(in, ctx, null);
     }
 
     /**
@@ -285,10 +282,7 @@ public class GridPortableMarshaller {
         if (arr[0] == NULL)
             return null;
 
-        BinaryReaderExImpl reader =
-            new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, 0), ldr, new BinaryReaderHandles());
-
-        return (T)reader.deserialize();
+        return (T)new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, 0), ldr).deserialize();
     }
 
     /**
@@ -302,17 +296,6 @@ public class GridPortableMarshaller {
     }
 
     /**
-     * Gets reader for the given input stream.
-     *
-     * @param in Input stream.
-     * @return Reader.
-     */
-    public BinaryReaderExImpl reader(PortableInputStream in) {
-        // TODO: IGNITE-1272 - Is class loader needed here?
-        return new BinaryReaderExImpl(ctx, in, null, new BinaryReaderHandles());
-    }
-
-    /**
      * @return Context.
      */
     public PortableContext context() {

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java
index 974f891..78d4bc5 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableClassDescriptor.java
@@ -646,7 +646,7 @@ public class PortableClassDescriptor {
             case PORTABLE:
                 res = newInstance();
 
-                reader.setHandler(res);
+                reader.setHandle(res);
 
                 if (serializer != null)
                     serializer.readBinary(res, reader);
@@ -658,7 +658,7 @@ public class PortableClassDescriptor {
             case EXTERNALIZABLE:
                 res = newInstance();
 
-                reader.setHandler(res);
+                reader.setHandle(res);
 
                 try {
                     ((Externalizable)res).readExternal(reader);
@@ -673,7 +673,7 @@ public class PortableClassDescriptor {
             case OBJECT:
                 res = newInstance();
 
-                reader.setHandler(res);
+                reader.setHandle(res);
 
                 for (BinaryFieldAccessor info : fields)
                     info.read(res, reader);
@@ -690,7 +690,7 @@ public class PortableClassDescriptor {
             try {
                 res = readResolveMtd.invoke(res);
 
-                reader.setHandler(res);
+                reader.setHandle(res);
             }
             catch (IllegalAccessException e) {
                 throw new RuntimeException(e);


[12/25] ignite git commit: Renaming.

Posted by ag...@apache.org.
Renaming.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/aee7b759
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/aee7b759
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/aee7b759

Branch: refs/heads/ignite-1282
Commit: aee7b759ff363fa9150097fd212b9f40724a8d15
Parents: 8050346
Author: Tikhonov Nikolay <ti...@gmail.com>
Authored: Mon Nov 23 18:01:53 2015 +0300
Committer: Tikhonov Nikolay <ti...@gmail.com>
Committed: Mon Nov 23 18:01:53 2015 +0300

----------------------------------------------------------------------
 .../internal/GridEventConsumeHandler.java       |  6 +--
 .../internal/GridMessageListenHandler.java      |  6 +--
 .../continuous/CacheContinuousQueryHandler.java |  7 ++-
 .../continuous/GridContinuousHandler.java       |  6 +--
 .../continuous/GridContinuousProcessor.java     | 50 ++++++++------------
 ...ContinuousQueryFailoverAbstractSelfTest.java |  2 +-
 .../continuous/GridEventConsumeSelfTest.java    |  2 +-
 7 files changed, 35 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java b/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java
index 3918976..1869d2e 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridEventConsumeHandler.java
@@ -110,17 +110,17 @@ class GridEventConsumeHandler implements GridContinuousHandler {
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForEvents() {
+    @Override public boolean isEvents() {
         return true;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForMessaging() {
+    @Override public boolean isMessaging() {
         return false;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForQuery() {
+    @Override public boolean isQuery() {
         return false;
     }
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java b/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java
index aa837b8..6b51107 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridMessageListenHandler.java
@@ -83,17 +83,17 @@ public class GridMessageListenHandler implements GridContinuousHandler {
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForEvents() {
+    @Override public boolean isEvents() {
         return false;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForMessaging() {
+    @Override public boolean isMessaging() {
         return true;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForQuery() {
+    @Override public boolean isQuery() {
         return false;
     }
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java
index b69d4cd..030ab4a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryHandler.java
@@ -201,17 +201,17 @@ class CacheContinuousQueryHandler<K, V> implements GridContinuousHandler {
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForEvents() {
+    @Override public boolean isEvents() {
         return false;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForMessaging() {
+    @Override public boolean isMessaging() {
         return false;
     }
 
     /** {@inheritDoc} */
-    @Override public boolean isForQuery() {
+    @Override public boolean isQuery() {
         return true;
     }
 
@@ -848,7 +848,6 @@ class CacheContinuousQueryHandler<K, V> implements GridContinuousHandler {
 
         /**
          * @param e Entry.
-         * @param topVer Topology version.
          * @return Continuous query entry.
          */
         private CacheContinuousQueryEntry skipEntry(CacheContinuousQueryEntry e) {

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
index d8698b3..3d6e266 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousHandler.java
@@ -130,17 +130,17 @@ public interface GridContinuousHandler extends Externalizable, Cloneable {
     /**
      * @return {@code True} if for events.
      */
-    public boolean isForEvents();
+    public boolean isEvents();
 
     /**
      * @return {@code True} if for messaging.
      */
-    public boolean isForMessaging();
+    public boolean isMessaging();
 
     /**
      * @return {@code True} if for continuous queries.
      */
-    public boolean isForQuery();
+    public boolean isQuery();
 
     /**
      * @return Cache name if this is a continuous query handler.

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java
index e218790..00f5e64 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/continuous/GridContinuousProcessor.java
@@ -210,29 +210,25 @@ public class GridContinuousProcessor extends GridProcessorAdapter {
                         if (msg.errs().isEmpty()) {
                             LocalRoutineInfo routine = locInfos.get(msg.routineId());
 
-                            if (routine != null && routine.handler().isForQuery()) {
-                                try {
-                                    Map<Integer, Long> cntrs = msg.updateCounters();
+                            // Update partition counters.
+                            if (routine != null && routine.handler().isQuery()) {
+                                Map<Integer, Long> cntrs = msg.updateCounters();
 
-                                    GridCacheAdapter<Object, Object> interCache =
-                                        ctx.cache().internalCache(routine.handler().cacheName());
+                                GridCacheAdapter<Object, Object> interCache =
+                                    ctx.cache().internalCache(routine.handler().cacheName());
 
-                                    if (interCache != null && cntrs != null && interCache.context() != null
-                                        && !interCache.isLocal() && !CU.clientNode(ctx.grid().localNode())) {
-                                        Map<Integer, Long> map = interCache.context().topology().updateCounters();
+                                if (interCache != null && cntrs != null && interCache.context() != null
+                                    && !interCache.isLocal() && !CU.clientNode(ctx.grid().localNode())) {
+                                    Map<Integer, Long> map = interCache.context().topology().updateCounters();
 
-                                        for (Map.Entry<Integer, Long> e : map.entrySet()) {
-                                            Long cntr0 = cntrs.get(e.getKey());
-                                            Long cntr1 = e.getValue();
+                                    for (Map.Entry<Integer, Long> e : map.entrySet()) {
+                                        Long cntr0 = cntrs.get(e.getKey());
+                                        Long cntr1 = e.getValue();
 
-                                            if (cntr0 == null || cntr1 > cntr0)
-                                                cntrs.put(e.getKey(), cntr1);
-                                        }
+                                        if (cntr0 == null || cntr1 > cntr0)
+                                            cntrs.put(e.getKey(), cntr1);
                                     }
                                 }
-                                catch (Exception e) {
-                                    U.warn(log, "Failed to load update counters.", e);
-                                }
 
                                 routine.handler().updateCounters(msg.updateCounters());
                             }
@@ -496,7 +492,7 @@ public class GridContinuousProcessor extends GridProcessorAdapter {
 
             GridContinuousHandler hnd = rmtInfo.hnd;
 
-            if (hnd.isForQuery() && F.eq(ctx.name(), hnd.cacheName()) && rmtInfo.clearDelayedRegister()) {
+            if (hnd.isQuery() && F.eq(ctx.name(), hnd.cacheName()) && rmtInfo.clearDelayedRegister()) {
                 GridContinuousHandler.RegisterStatus status = hnd.register(rmtInfo.nodeId, routineId, this.ctx);
 
                 assert status != GridContinuousHandler.RegisterStatus.DELAYED;
@@ -518,7 +514,7 @@ public class GridContinuousProcessor extends GridProcessorAdapter {
 
             GridContinuousHandler hnd = entry.getValue().hnd;
 
-            if (hnd.isForQuery() && F.eq(ctx.name(), hnd.cacheName()))
+            if (hnd.isQuery() && F.eq(ctx.name(), hnd.cacheName()))
                 it.remove();
         }
     }
@@ -888,16 +884,12 @@ public class GridContinuousProcessor extends GridProcessorAdapter {
             }
         }
 
-        try {
-            if (hnd.isForQuery() && ctx.cache() != null && ctx.cache().internalCache(hnd.cacheName()) != null) {
-                Map<Integer, Long> cntrs = ctx.cache().internalCache(hnd.cacheName())
-                    .context().topology().updateCounters();
+        // Load partition counters.
+        if (hnd.isQuery() && ctx.cache() != null && ctx.cache().internalCache(hnd.cacheName()) != null) {
+            Map<Integer, Long> cntrs = ctx.cache().internalCache(hnd.cacheName())
+                .context().topology().updateCounters();
 
-                req.addUpdateCounters(cntrs);
-            }
-        }
-        catch (Exception e) {
-            U.warn(log, "Failed to load partition counters.", e);
+            req.addUpdateCounters(cntrs);
         }
 
         if (err != null)
@@ -1319,7 +1311,7 @@ public class GridContinuousProcessor extends GridProcessorAdapter {
          * Marks info to be registered when cache is started.
          */
         public void markDelayedRegister() {
-            assert hnd.isForQuery();
+            assert hnd.isQuery();
 
             delayedRegister = true;
         }

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryFailoverAbstractSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryFailoverAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryFailoverAbstractSelfTest.java
index b311272..08e8adb 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryFailoverAbstractSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/query/continuous/CacheContinuousQueryFailoverAbstractSelfTest.java
@@ -1327,7 +1327,7 @@ public abstract class CacheContinuousQueryFailoverAbstractSelfTest extends GridC
         for (Object info : infos.values()) {
             GridContinuousHandler hnd = GridTestUtils.getFieldValue(info, "hnd");
 
-            if (hnd.isForQuery() && hnd.cacheName() == null) {
+            if (hnd.isQuery() && hnd.cacheName() == null) {
                 backupQueue = GridTestUtils.getFieldValue(hnd, "backupQueue");
 
                 break;

http://git-wip-us.apache.org/repos/asf/ignite/blob/aee7b759/modules/core/src/test/java/org/apache/ignite/internal/processors/continuous/GridEventConsumeSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/continuous/GridEventConsumeSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/continuous/GridEventConsumeSelfTest.java
index 8f28aef..d239ea8 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/continuous/GridEventConsumeSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/continuous/GridEventConsumeSelfTest.java
@@ -163,7 +163,7 @@ public class GridEventConsumeSelfTest extends GridCommonAbstractTest {
         return F.view(U.<Map<UUID, LocalRoutineInfo>>field(proc, "locInfos").values(),
             new IgnitePredicate<LocalRoutineInfo>() {
                 @Override public boolean apply(LocalRoutineInfo info) {
-                    return info.handler().isForEvents();
+                    return info.handler().isEvents();
                 }
             });
     }


[09/25] ignite git commit: IGNITE-1972: Optimized "unmarshal" path (i.e. when we do not need to deserialize the object) by a factor of x3.

Posted by ag...@apache.org.
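
The gain comes from splitting the read path in two: the new static PortableUtils.unmarshal wraps the serialized bytes in a BinaryObjectImpl (or BinaryObjectOffheapImpl) without instantiating the user class, while full deserialization stays in BinaryReaderExImpl and is performed only when the user object is actually needed. A rough sketch of the two paths, assuming the internal portable classes from this patch and an already-initialized PortableContext:

    // Sketch only; `ctx` is a PortableContext, `bytes` is the serialized form
    // produced by GridPortableMarshaller. Class loader is null, as in the patch.
    static Object readBack(PortableContext ctx, byte[] bytes, boolean deserialize) {
        if (deserialize)
            // Heavyweight path: builds the actual user object.
            return new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(bytes, 0), null).deserialize();

        // Optimized path: returns a BinaryObject view over the bytes; no user class is involved.
        return PortableUtils.unmarshal(PortableHeapInputStream.create(bytes, 0), ctx, null);
    }
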
http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableUtils.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableUtils.java
index 6d155fe..5c798b8 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableUtils.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/PortableUtils.java
@@ -17,19 +17,29 @@
 
 package org.apache.ignite.internal.portable;
 
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.binary.BinaryInvalidTypeException;
 import org.apache.ignite.binary.BinaryObject;
 import org.apache.ignite.binary.BinaryObjectException;
 import org.apache.ignite.binary.Binarylizable;
 import org.apache.ignite.internal.portable.builder.PortableLazyValue;
+import org.apache.ignite.internal.portable.streams.PortableInputStream;
+import org.apache.ignite.internal.util.lang.GridMapEntry;
 import org.apache.ignite.internal.util.typedef.F;
 import org.apache.ignite.internal.util.typedef.internal.U;
 import org.apache.ignite.lang.IgniteBiTuple;
 import org.jetbrains.annotations.Nullable;
 import org.jsr166.ConcurrentHashMap8;
 
+import java.io.ByteArrayInputStream;
 import java.io.Externalizable;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.math.BigDecimal;
+import java.math.BigInteger;
 import java.sql.Timestamp;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
@@ -37,7 +47,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
+import java.util.LinkedList;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
@@ -45,6 +57,8 @@ import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentSkipListSet;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.ARR_LIST;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BOOLEAN;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BOOLEAN_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.BYTE;
@@ -53,6 +67,8 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.CHAR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.CHAR_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.CLASS;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.COL;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.CONC_HASH_MAP;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.CONC_SKIP_LIST_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DATE;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DATE_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.DECIMAL;
@@ -63,16 +79,25 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.ENUM;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.ENUM_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.FLOAT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.FLOAT_ARR;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.HANDLE;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.HASH_MAP;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.HASH_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.INT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.INT_ARR;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_HASH_MAP;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_HASH_SET;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.LINKED_LIST;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.LONG;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.LONG_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.MAP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.MAP_ENTRY;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.NULL;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJ;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJECT_TYPE_ID;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.OBJ_ARR;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.OPTM_MARSH;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.PORTABLE_OBJ;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.PROPERTIES_MAP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.PROTO_VER;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.SHORT;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.SHORT_ARR;
@@ -80,6 +105,11 @@ import static org.apache.ignite.internal.portable.GridPortableMarshaller.STRING;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.STRING_ARR;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.TIMESTAMP;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.TIMESTAMP_ARR;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.TREE_MAP;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.TREE_SET;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.UNREGISTERED_TYPE_ID;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.USER_COL;
+import static org.apache.ignite.internal.portable.GridPortableMarshaller.USER_SET;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.UUID;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.UUID_ARR;
 
@@ -802,9 +832,9 @@ public class PortableUtils {
     public static int fieldOffsetRelative(PortablePositionReadable stream, int pos, int fieldOffsetSize) {
         int res;
 
-        if (fieldOffsetSize == PortableUtils.OFFSET_1)
+        if (fieldOffsetSize == OFFSET_1)
             res = (int)stream.readBytePositioned(pos) & 0xFF;
-        else if (fieldOffsetSize == PortableUtils.OFFSET_2)
+        else if (fieldOffsetSize == OFFSET_2)
             res = (int)stream.readShortPositioned(pos) & 0xFFFF;
         else
             res = stream.readIntPositioned(pos);
@@ -860,8 +890,8 @@ public class PortableUtils {
                     throw new BinaryObjectException(
                         "Binary type has different field types [" + "typeName=" + oldMeta.typeName() +
                             ", fieldName=" + newField.getKey() +
-                            ", fieldTypeName1=" + PortableUtils.fieldTypeName(oldFieldType) +
-                            ", fieldTypeName2=" + PortableUtils.fieldTypeName(newField.getValue()) + ']'
+                            ", fieldTypeName1=" + fieldTypeName(oldFieldType) +
+                            ", fieldTypeName2=" + fieldTypeName(newField.getValue()) + ']'
                     );
                 }
             }
@@ -983,4 +1013,827 @@ public class PortableUtils {
         else
             return BinaryWriteMode.OBJECT;
     }
+
+    /**
+     * @return Value.
+     */
+    public static byte[] doReadByteArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readByteArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static boolean[] doReadBooleanArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readBooleanArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static short[] doReadShortArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readShortArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static char[] doReadCharArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readCharArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static int[] doReadIntArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readIntArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static long[] doReadLongArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readLongArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static float[] doReadFloatArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readFloatArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static double[] doReadDoubleArray(PortableInputStream in) {
+        int len = in.readInt();
+
+        return in.readDoubleArray(len);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static BigDecimal doReadDecimal(PortableInputStream in) {
+        int scale = in.readInt();
+        byte[] mag = doReadByteArray(in);
+
+        BigInteger intVal = new BigInteger(mag);
+
+        if (scale < 0) {
+            scale &= 0x7FFFFFFF;
+
+            intVal = intVal.negate();
+        }
+
+        return new BigDecimal(intVal, scale);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static String doReadString(PortableInputStream in) {
+        if (!in.hasArray())
+            return new String(doReadByteArray(in), UTF_8);
+
+        int strLen = in.readInt();
+
+        int pos = in.position();
+
+        // String will copy necessary array part for us.
+        String res = new String(in.array(), pos, strLen, UTF_8);
+
+        in.position(pos + strLen);
+
+        return res;
+    }
+
+    /**
+     * @return Value.
+     */
+    public static UUID doReadUuid(PortableInputStream in) {
+        return new UUID(in.readLong(), in.readLong());
+    }
+
+    /**
+     * @return Value.
+     */
+    public static Date doReadDate(PortableInputStream in) {
+        long time = in.readLong();
+
+        return new Date(time);
+    }
+
+    /**
+     * @return Value.
+     */
+    public static Timestamp doReadTimestamp(PortableInputStream in) {
+        long time = in.readLong();
+        int nanos = in.readInt();
+
+        Timestamp ts = new Timestamp(time);
+
+        ts.setNanos(ts.getNanos() + nanos);
+
+        return ts;
+    }
+
+    /**
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static BigDecimal[] doReadDecimalArray(PortableInputStream in) throws BinaryObjectException {
+        int len = in.readInt();
+
+        BigDecimal[] arr = new BigDecimal[len];
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else {
+                if (flag != DECIMAL)
+                    throw new BinaryObjectException("Invalid flag value: " + flag);
+
+                arr[i] = doReadDecimal(in);
+            }
+        }
+
+        return arr;
+    }
+
+    /**
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static String[] doReadStringArray(PortableInputStream in) throws BinaryObjectException {
+        int len = in.readInt();
+
+        String[] arr = new String[len];
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else {
+                if (flag != STRING)
+                    throw new BinaryObjectException("Invalid flag value: " + flag);
+
+                arr[i] = doReadString(in);
+            }
+        }
+
+        return arr;
+    }
+
+    /**
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static UUID[] doReadUuidArray(PortableInputStream in) throws BinaryObjectException {
+        int len = in.readInt();
+
+        UUID[] arr = new UUID[len];
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else {
+                if (flag != UUID)
+                    throw new BinaryObjectException("Invalid flag value: " + flag);
+
+                arr[i] = doReadUuid(in);
+            }
+        }
+
+        return arr;
+    }
+
+    /**
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static Date[] doReadDateArray(PortableInputStream in) throws BinaryObjectException {
+        int len = in.readInt();
+
+        Date[] arr = new Date[len];
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else {
+                if (flag != DATE)
+                    throw new BinaryObjectException("Invalid flag value: " + flag);
+
+                arr[i] = doReadDate(in);
+            }
+        }
+
+        return arr;
+    }
+
+    /**
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static Timestamp[] doReadTimestampArray(PortableInputStream in) throws BinaryObjectException {
+        int len = in.readInt();
+
+        Timestamp[] arr = new Timestamp[len];
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else {
+                if (flag != TIMESTAMP)
+                    throw new BinaryObjectException("Invalid flag value: " + flag);
+
+                arr[i] = doReadTimestamp(in);
+            }
+        }
+
+        return arr;
+    }
+
+    /**
+     * @return Value.
+     */
+    public static BinaryObject doReadPortableObject(PortableInputStream in, PortableContext ctx) {
+        if (in.offheapPointer() > 0) {
+            int len = in.readInt();
+
+            int pos = in.position();
+
+            in.position(in.position() + len);
+
+            int start = in.readInt();
+
+            return new BinaryObjectOffheapImpl(ctx, in.offheapPointer() + pos, start, len);
+        }
+        else {
+            byte[] arr = doReadByteArray(in);
+            int start = in.readInt();
+
+            return new BinaryObjectImpl(ctx, arr, start);
+        }
+    }
+
+    /**
+     * @return Value.
+     */
+    public static Class doReadClass(PortableInputStream in, PortableContext ctx, ClassLoader ldr)
+        throws BinaryObjectException {
+        int typeId = in.readInt();
+
+        return doReadClass(in, ctx, ldr, typeId);
+    }
+
+    /**
+     * @param typeId Type id.
+     * @return Value.
+     */
+    public static Class doReadClass(PortableInputStream in, PortableContext ctx, ClassLoader ldr, int typeId)
+        throws BinaryObjectException {
+        Class cls;
+
+        if (typeId == OBJECT_TYPE_ID)
+            return Object.class;
+
+        if (typeId != UNREGISTERED_TYPE_ID)
+            cls = ctx.descriptorForTypeId(true, typeId, ldr).describedClass();
+        else {
+            byte flag = in.readByte();
+
+            if (flag != STRING)
+                throw new BinaryObjectException("No class definition for typeId: " + typeId);
+
+            String clsName = doReadString(in);
+
+            try {
+                cls = U.forName(clsName, ldr);
+            }
+            catch (ClassNotFoundException e) {
+                throw new BinaryInvalidTypeException("Failed to load the class: " + clsName, e);
+            }
+
+            // forces registering of class by type id, at least locally
+            ctx.descriptorForClass(cls);
+        }
+
+        return cls;
+    }
+
+    /**
+     * Having target class in place we simply read ordinal and create final representation.
+     *
+     * @param cls Enum class.
+     * @return Value.
+     */
+    public static Enum<?> doReadEnum(PortableInputStream in, Class<?> cls) throws BinaryObjectException {
+        assert cls != null;
+
+        if (!cls.isEnum())
+            throw new BinaryObjectException("Class does not represent enum type: " + cls.getName());
+
+        int ord = in.readInt();
+
+        return BinaryEnumCache.get(cls, ord);
+    }
+
+    /**
+     * @param cls Enum class.
+     * @return Value.
+     */
+    public static Object[] doReadEnumArray(PortableInputStream in, PortableContext ctx, ClassLoader ldr, Class<?> cls)
+        throws BinaryObjectException {
+        int len = in.readInt();
+
+        Object[] arr = (Object[]) Array.newInstance(cls, len);
+
+        for (int i = 0; i < len; i++) {
+            byte flag = in.readByte();
+
+            if (flag == NULL)
+                arr[i] = null;
+            else
+                arr[i] = doReadEnum(in, doReadClass(in, ctx, ldr));
+        }
+
+        return arr;
+    }
+
+    /**
+     * Read object serialized using optimized marshaller.
+     *
+     * @return Result.
+     */
+    public static Object doReadOptimized(PortableInputStream in, PortableContext ctx) {
+        int len = in.readInt();
+
+        ByteArrayInputStream input = new ByteArrayInputStream(in.array(), in.position(), len);
+
+        try {
+            return ctx.optimizedMarsh().unmarshal(input, null);
+        }
+        catch (IgniteCheckedException e) {
+            throw new BinaryObjectException("Failed to unmarshal object with optimized marshaller", e);
+        }
+        finally {
+            in.position(in.position() + len);
+        }
+    }
+
+    /**
+     * @return Object.
+     * @throws BinaryObjectException In case of error.
+     */
+    @Nullable public static Object doReadObject(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles) throws BinaryObjectException {
+        return new BinaryReaderExImpl(ctx, in, ldr, handles.handles()).deserialize();
+    }
+
+    /**
+     * @return Unmarshalled value.
+     * @throws BinaryObjectException In case of error.
+     */
+    @Nullable public static Object unmarshal(PortableInputStream in, PortableContext ctx, ClassLoader ldr)
+        throws BinaryObjectException {
+        return unmarshal(in, ctx, ldr, new BinaryReaderHandlesHolderImpl());
+    }
+
+    /**
+     * @return Unmarshalled value.
+     * @throws BinaryObjectException In case of error.
+     */
+    @Nullable public static Object unmarshal(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles) throws BinaryObjectException {
+        return unmarshal(in, ctx, ldr, handles, false);
+    }
+
+    /**
+     * @return Unmarshalled value.
+     * @throws BinaryObjectException In case of error.
+     */
+    @Nullable public static Object unmarshal(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean detach) throws BinaryObjectException {
+        int start = in.position();
+
+        byte flag = in.readByte();
+
+        switch (flag) {
+            case NULL:
+                return null;
+
+            case HANDLE: {
+                int handlePos = start - in.readInt();
+
+                Object obj = handles.getHandle(handlePos);
+
+                if (obj == null) {
+                    int retPos = in.position();
+
+                    in.position(handlePos);
+
+                    obj = unmarshal(in, ctx, ldr, handles);
+
+                    in.position(retPos);
+                }
+
+                return obj;
+            }
+
+            case OBJ: {
+                checkProtocolVersion(in.readByte());
+
+                int len = length(in, start);
+
+                BinaryObjectEx po;
+
+                if (detach) {
+                    // In detach mode we simply copy object's content.
+                    in.position(start);
+
+                    po = new BinaryObjectImpl(ctx, in.readByteArray(len), 0);
+                }
+                else {
+                    if (in.offheapPointer() == 0)
+                        po = new BinaryObjectImpl(ctx, in.array(), start);
+                    else
+                        po = new BinaryObjectOffheapImpl(ctx, in.offheapPointer(), start,
+                            in.remaining() + in.position());
+
+                    in.position(start + po.length());
+                }
+
+                handles.setHandle(po, start);
+
+                return po;
+            }
+
+            case BYTE:
+                return in.readByte();
+
+            case SHORT:
+                return in.readShort();
+
+            case INT:
+                return in.readInt();
+
+            case LONG:
+                return in.readLong();
+
+            case FLOAT:
+                return in.readFloat();
+
+            case DOUBLE:
+                return in.readDouble();
+
+            case CHAR:
+                return in.readChar();
+
+            case BOOLEAN:
+                return in.readBoolean();
+
+            case DECIMAL:
+                return doReadDecimal(in);
+
+            case STRING:
+                return doReadString(in);
+
+            case UUID:
+                return doReadUuid(in);
+
+            case DATE:
+                return doReadDate(in);
+
+            case TIMESTAMP:
+                return doReadTimestamp(in);
+
+            case BYTE_ARR:
+                return doReadByteArray(in);
+
+            case SHORT_ARR:
+                return doReadShortArray(in);
+
+            case INT_ARR:
+                return doReadIntArray(in);
+
+            case LONG_ARR:
+                return doReadLongArray(in);
+
+            case FLOAT_ARR:
+                return doReadFloatArray(in);
+
+            case DOUBLE_ARR:
+                return doReadDoubleArray(in);
+
+            case CHAR_ARR:
+                return doReadCharArray(in);
+
+            case BOOLEAN_ARR:
+                return doReadBooleanArray(in);
+
+            case DECIMAL_ARR:
+                return doReadDecimalArray(in);
+
+            case STRING_ARR:
+                return doReadStringArray(in);
+
+            case UUID_ARR:
+                return doReadUuidArray(in);
+
+            case DATE_ARR:
+                return doReadDateArray(in);
+
+            case TIMESTAMP_ARR:
+                return doReadTimestampArray(in);
+
+            case OBJ_ARR:
+                return doReadObjectArray(in, ctx, ldr, handles, false);
+
+            case COL:
+                return doReadCollection(in, ctx, ldr, handles, false, null);
+
+            case MAP:
+                return doReadMap(in, ctx, ldr, handles, false, null);
+
+            case MAP_ENTRY:
+                return doReadMapEntry(in, ctx, ldr, handles, false);
+
+            case PORTABLE_OBJ:
+                return doReadPortableObject(in, ctx);
+
+            case ENUM:
+                return doReadEnum(in, doReadClass(in, ctx, ldr));
+
+            case ENUM_ARR:
+                return doReadEnumArray(in, ctx, ldr, doReadClass(in, ctx, ldr));
+
+            case CLASS:
+                return doReadClass(in, ctx, ldr);
+
+            case OPTM_MARSH:
+                return doReadOptimized(in, ctx);
+
+            default:
+                throw new BinaryObjectException("Invalid flag value: " + flag);
+        }
+    }
+
+    /**
+     * @param deserialize Deep flag.
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static Object[] doReadObjectArray(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean deserialize) throws BinaryObjectException {
+        int hPos = positionForHandle(in);
+
+        Class compType = doReadClass(in, ctx, ldr);
+
+        int len = in.readInt();
+
+        Object[] arr = deserialize ? (Object[])Array.newInstance(compType, len) : new Object[len];
+
+        handles.setHandle(arr, hPos);
+
+        for (int i = 0; i < len; i++)
+            arr[i] = deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize);
+
+        return arr;
+    }
+
+    /**
+     * @param deserialize Deep flag.
+     * @param cls Collection class.
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    @SuppressWarnings("unchecked")
+    public static Collection<?> doReadCollection(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean deserialize, @Nullable Class<? extends Collection> cls)
+        throws BinaryObjectException {
+        int hPos = positionForHandle(in);
+
+        int size = in.readInt();
+
+        assert size >= 0;
+
+        byte colType = in.readByte();
+
+        Collection<Object> col;
+
+        if (cls != null) {
+            try {
+                Constructor<? extends Collection> cons = cls.getConstructor();
+
+                col = cons.newInstance();
+            }
+            catch (NoSuchMethodException ignored) {
+                throw new BinaryObjectException("Collection class doesn't have public default constructor: " +
+                    cls.getName());
+            }
+            catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
+                throw new BinaryObjectException("Failed to instantiate collection: " + cls.getName(), e);
+            }
+        }
+        else {
+            switch (colType) {
+                case ARR_LIST:
+                    col = new ArrayList<>(size);
+
+                    break;
+
+                case LINKED_LIST:
+                    col = new LinkedList<>();
+
+                    break;
+
+                case HASH_SET:
+                    col = U.newHashSet(size);
+
+                    break;
+
+                case LINKED_HASH_SET:
+                    col = U.newLinkedHashSet(size);
+
+                    break;
+
+                case TREE_SET:
+                    col = new TreeSet<>();
+
+                    break;
+
+                case CONC_SKIP_LIST_SET:
+                    col = new ConcurrentSkipListSet<>();
+
+                    break;
+
+                case USER_SET:
+                    col = U.newHashSet(size);
+
+                    break;
+
+                case USER_COL:
+                    col = new ArrayList<>(size);
+
+                    break;
+
+                default:
+                    throw new BinaryObjectException("Invalid collection type: " + colType);
+            }
+        }
+
+        handles.setHandle(col, hPos);
+
+        for (int i = 0; i < size; i++)
+            col.add(deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize));
+
+        return col;
+    }
+
+    /**
+     * @param deserialize Deep flag.
+     * @param cls Map class.
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    @SuppressWarnings("unchecked")
+    public static Map<?, ?> doReadMap(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean deserialize, @Nullable Class<? extends Map> cls)
+        throws BinaryObjectException {
+        int hPos = positionForHandle(in);
+
+        int size = in.readInt();
+
+        assert size >= 0;
+
+        byte mapType = in.readByte();
+
+        Map<Object, Object> map;
+
+        if (cls != null) {
+            try {
+                Constructor<? extends Map> cons = cls.getConstructor();
+
+                map = cons.newInstance();
+            }
+            catch (NoSuchMethodException ignored) {
+                throw new BinaryObjectException("Map class doesn't have public default constructor: " +
+                    cls.getName());
+            }
+            catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
+                throw new BinaryObjectException("Failed to instantiate map: " + cls.getName(), e);
+            }
+        }
+        else {
+            switch (mapType) {
+                case HASH_MAP:
+                    map = U.newHashMap(size);
+
+                    break;
+
+                case LINKED_HASH_MAP:
+                    map = U.newLinkedHashMap(size);
+
+                    break;
+
+                case TREE_MAP:
+                    map = new TreeMap<>();
+
+                    break;
+
+                case CONC_HASH_MAP:
+                    map = new ConcurrentHashMap<>(size);
+
+                    break;
+
+                case USER_COL:
+                    map = U.newHashMap(size);
+
+                    break;
+
+                case PROPERTIES_MAP:
+                    map = new Properties();
+
+                    break;
+
+                default:
+                    throw new BinaryObjectException("Invalid map type: " + mapType);
+            }
+        }
+
+        handles.setHandle(map, hPos);
+
+        for (int i = 0; i < size; i++) {
+            Object key = deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize);
+            Object val = deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize);
+
+            map.put(key, val);
+        }
+
+        return map;
+    }
+
+    /**
+     * @param deserialize Deserialize flag.
+     * @return Value.
+     * @throws BinaryObjectException In case of error.
+     */
+    public static Map.Entry<?, ?> doReadMapEntry(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean deserialize) throws BinaryObjectException {
+        int hPos = positionForHandle(in);
+
+        Object val1 = deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize);
+        Object val2 = deserializeOrUnmarshal(in, ctx, ldr, handles, deserialize);
+
+        GridMapEntry entry = new GridMapEntry<>(val1, val2);
+
+        handles.setHandle(entry, hPos);
+
+        return entry;
+    }
+
+    /**
+     * Deserialize or unmarshal the object.
+     *
+     * @param deserialize Deserialize.
+     * @return Result.
+     */
+    private static Object deserializeOrUnmarshal(PortableInputStream in, PortableContext ctx, ClassLoader ldr,
+        BinaryReaderHandlesHolder handles, boolean deserialize) {
+        return deserialize ? doReadObject(in, ctx, ldr, handles) : unmarshal(in, ctx, ldr, handles);
+    }
+
+    /**
+     * Get position to be used for handle. We assume here that the header byte was already read, hence subtract 1.
+     *
+     * @return Position for handle.
+     */
+    public static int positionForHandle(PortableInputStream in) {
+        return in.position() - 1;
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/portable/builder/PortableBuilderReader.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/builder/PortableBuilderReader.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/builder/PortableBuilderReader.java
index 538c26c..cf27da4 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/builder/PortableBuilderReader.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/builder/PortableBuilderReader.java
@@ -17,24 +17,23 @@
 
 package org.apache.ignite.internal.portable.builder;
 
-import java.sql.Timestamp;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.ignite.internal.portable.BinaryReaderHandles;
+import org.apache.ignite.binary.BinaryObjectException;
+import org.apache.ignite.internal.portable.BinaryObjectImpl;
+import org.apache.ignite.internal.portable.BinaryReaderExImpl;
+import org.apache.ignite.internal.portable.BinaryWriterExImpl;
 import org.apache.ignite.internal.portable.GridPortableMarshaller;
 import org.apache.ignite.internal.portable.PortableContext;
 import org.apache.ignite.internal.portable.PortablePositionReadable;
-import org.apache.ignite.internal.portable.BinaryObjectImpl;
 import org.apache.ignite.internal.portable.PortablePrimitives;
-import org.apache.ignite.internal.portable.BinaryReaderExImpl;
 import org.apache.ignite.internal.portable.PortableSchema;
 import org.apache.ignite.internal.portable.PortableUtils;
-import org.apache.ignite.internal.portable.BinaryWriterExImpl;
-import org.apache.ignite.binary.BinaryObjectException;
 import org.apache.ignite.internal.portable.streams.PortableHeapInputStream;
 
+import java.sql.Timestamp;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.NULL;
 import static org.apache.ignite.internal.portable.GridPortableMarshaller.STRING;
@@ -69,7 +68,7 @@ public class PortableBuilderReader implements PortablePositionReadable {
         pos = objImpl.start();
 
         // TODO: IGNITE-1272 - Is class loader needed here?
-        reader = new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, pos), null, new BinaryReaderHandles());
+        reader = new BinaryReaderExImpl(ctx, PortableHeapInputStream.create(arr, pos), null);
 
         objMap = new HashMap<>();
     }

http://git-wip-us.apache.org/repos/asf/ignite/blob/c6b2fa56/modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformContextImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformContextImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformContextImpl.java
index d999466..9a7f0df 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformContextImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformContextImpl.java
@@ -36,6 +36,7 @@ import org.apache.ignite.events.TaskEvent;
 import org.apache.ignite.internal.GridKernalContext;
 import org.apache.ignite.internal.portable.BinaryRawReaderEx;
 import org.apache.ignite.internal.portable.BinaryRawWriterEx;
+import org.apache.ignite.internal.portable.BinaryReaderExImpl;
 import org.apache.ignite.internal.portable.BinaryTypeImpl;
 import org.apache.ignite.internal.portable.GridPortableMarshaller;
 import org.apache.ignite.internal.processors.cache.portable.CacheObjectBinaryProcessorImpl;
@@ -175,7 +176,8 @@ public class PlatformContextImpl implements PlatformContext {
 
     /** {@inheritDoc} */
     @Override public BinaryRawReaderEx reader(PlatformInputStream in) {
-        return marsh.reader(in);
+        // TODO: IGNITE-1272 - Is class loader needed here?
+        return new BinaryReaderExImpl(marsh.context(), in, null);
     }
 
     /** {@inheritDoc} */


[05/25] ignite git commit: IGNITE-1753 Refactored usages of deprecated CacheTypeMetadata to JdbcType.

Posted by ag...@apache.org.
IGNITE-1753 Refactored usages of deprecated CacheTypeMetadata to JdbcType.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/d71f6129
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/d71f6129
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/d71f6129

Branch: refs/heads/ignite-1282
Commit: d71f6129bc737539e61206c391fc25c776f36242
Parents: 19d2dd0
Author: AKuznetsov <ak...@gridgain.com>
Authored: Mon Nov 23 18:20:50 2015 +0700
Committer: AKuznetsov <ak...@gridgain.com>
Committed: Mon Nov 23 18:20:50 2015 +0700

----------------------------------------------------------------------
 examples/schema-import/bin/db-init.sql          |   3 +-
 .../org/apache/ignite/schema/CacheConfig.java   |   7 +-
 .../java/org/apache/ignite/schema/Demo.java     |  20 +-
 .../org/apache/ignite/cache/QueryIndex.java     |  53 +-
 .../store/jdbc/CacheAbstractJdbcStore.java      | 638 ++++++++++++-------
 .../store/jdbc/CacheJdbcBlobStoreFactory.java   |  14 +-
 .../cache/store/jdbc/CacheJdbcPojoStore.java    | 444 +++++++++----
 .../store/jdbc/CacheJdbcPojoStoreFactory.java   | 277 +++++++-
 .../ignite/cache/store/jdbc/JdbcType.java       | 255 ++++++++
 .../cache/store/jdbc/JdbcTypeDefaultHasher.java |  43 ++
 .../ignite/cache/store/jdbc/JdbcTypeField.java  | 172 +++++
 .../ignite/cache/store/jdbc/JdbcTypeHasher.java |  34 +
 .../processors/query/GridQueryProcessor.java    |   6 +-
 .../ignite/internal/visor/cache/VisorCache.java |   4 +-
 .../CacheJdbcPojoStoreAbstractSelfTest.java     | 395 ++++++++++++
 ...dbcPojoStoreOptimizedMarshallerSelfTest.java |  31 +
 ...JdbcPojoStorePortableMarshallerSelfTest.java |  85 +++
 .../store/jdbc/CacheJdbcPojoStoreTest.java      | 200 +++---
 ...eJdbcStoreAbstractMultithreadedSelfTest.java |   2 +-
 .../ignite/testsuites/IgniteCacheTestSuite.java |   6 +-
 modules/schema-import/README.txt                | 176 ++---
 .../ignite/schema/generator/CodeGenerator.java  | 198 +++---
 .../ignite/schema/generator/XmlGenerator.java   | 101 +--
 .../apache/ignite/schema/model/IndexItem.java   |  54 --
 .../ignite/schema/model/PojoDescriptor.java     |  72 +--
 .../ignite/schema/model/SchemaDescriptor.java   |   6 +-
 .../schema/parser/DatabaseMetadataParser.java   |  12 +-
 .../apache/ignite/schema/parser/DbTable.java    |  37 +-
 .../parser/dialect/DatabaseMetadataDialect.java |  32 +-
 .../parser/dialect/JdbcMetadataDialect.java     |  22 +-
 .../parser/dialect/OracleMetadataDialect.java   |  24 +-
 .../apache/ignite/schema/ui/ModalDialog.java    |   6 +-
 .../ignite/schema/ui/SchemaImportApp.java       |  13 +-
 .../schema/test/AbstractSchemaImportTest.java   |   4 +-
 .../schema/test/model/ignite-type-metadata.xml  | 610 +++++++++---------
 .../yardstick/config/ignite-store-config.xml    |  50 +-
 36 files changed, 2844 insertions(+), 1262 deletions(-)
----------------------------------------------------------------------
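
To make the CacheTypeMetadata -> JdbcType migration concrete, here is a minimal configuration
sketch assembled from the APIs visible in the diffs below. The data source bean name, cache name,
table layout and the Person value class are illustrative assumptions, not part of this commit.

    // Minimal sketch (assumed names): configure the store through JdbcType instead of CacheTypeMetadata.
    CacheJdbcPojoStoreFactory<Integer, Person> storeFactory = new CacheJdbcPojoStoreFactory<>();

    storeFactory.setDataSourceBean("your_data_source_name"); // assumed data source bean
    storeFactory.setDialect(new H2Dialect());

    JdbcType personType = new JdbcType();

    personType.setCacheName("personCache");   // assumed cache name
    personType.setDatabaseSchema("PUBLIC");   // assumed schema
    personType.setDatabaseTable("PERSON");

    personType.setKeyType("java.lang.Integer");
    personType.setKeyFields(new JdbcTypeField[] {
        new JdbcTypeField(java.sql.Types.INTEGER, "ID", Integer.class, "id")});

    personType.setValueType("org.apache.ignite.examples.Person"); // assumed value class
    personType.setValueFields(new JdbcTypeField[] {
        new JdbcTypeField(java.sql.Types.VARCHAR, "FIRST_NAME", String.class, "firstName"),
        new JdbcTypeField(java.sql.Types.VARCHAR, "LAST_NAME", String.class, "lastName"),
        new JdbcTypeField(java.sql.Types.DOUBLE, "SALARY", Double.class, "salary")});

    storeFactory.setTypes(personType);

    CacheConfiguration<Integer, Person> ccfg = new CacheConfiguration<>("personCache");

    ccfg.setCacheStoreFactory(storeFactory);
    ccfg.setReadThrough(true);
    ccfg.setWriteThrough(true);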


http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/examples/schema-import/bin/db-init.sql
----------------------------------------------------------------------
diff --git a/examples/schema-import/bin/db-init.sql b/examples/schema-import/bin/db-init.sql
index f02236a..8a91a6a 100644
--- a/examples/schema-import/bin/db-init.sql
+++ b/examples/schema-import/bin/db-init.sql
@@ -17,7 +17,8 @@
 
 -- Script of database initialization for Schema Import Demo.
 drop table PERSON;
-create table PERSON(id integer not null, first_name varchar(50), last_name varchar(50), salary double not null, PRIMARY KEY(id));
+
+create table PERSON(id integer not null PRIMARY KEY, first_name varchar(50), last_name varchar(50), salary double not null);
 
 insert into PERSON(id, first_name, last_name, salary) values(1, 'Johannes', 'Kepler', 1000);
 insert into PERSON(id, first_name, last_name, salary) values(2, 'Galileo', 'Galilei', 2000);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/examples/schema-import/src/main/java/org/apache/ignite/schema/CacheConfig.java
----------------------------------------------------------------------
diff --git a/examples/schema-import/src/main/java/org/apache/ignite/schema/CacheConfig.java b/examples/schema-import/src/main/java/org/apache/ignite/schema/CacheConfig.java
index cb316c5..c5801cc 100644
--- a/examples/schema-import/src/main/java/org/apache/ignite/schema/CacheConfig.java
+++ b/examples/schema-import/src/main/java/org/apache/ignite/schema/CacheConfig.java
@@ -17,8 +17,7 @@
 
 package org.apache.ignite.schema;
 
-import javax.cache.configuration.Factory;
-import org.apache.ignite.cache.store.CacheStore;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
 import org.apache.ignite.configuration.CacheConfiguration;
 
 /**
@@ -31,7 +30,7 @@ public class CacheConfig {
      * @param name Cache name.
      * @param storeFactory Cache store factory.
      */
-    public static <K, V> CacheConfiguration<K, V> cache(String name, Factory<CacheStore<K, V>> storeFactory) {
+    public static <K, V> CacheConfiguration<K, V> cache(String name, CacheJdbcPojoStoreFactory<K, V> storeFactory) {
         throw new IllegalStateException("Please run Ignite Schema Import Utility as described in README.txt");
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/examples/schema-import/src/main/java/org/apache/ignite/schema/Demo.java
----------------------------------------------------------------------
diff --git a/examples/schema-import/src/main/java/org/apache/ignite/schema/Demo.java b/examples/schema-import/src/main/java/org/apache/ignite/schema/Demo.java
index cade7f1..a981f5a 100644
--- a/examples/schema-import/src/main/java/org/apache/ignite/schema/Demo.java
+++ b/examples/schema-import/src/main/java/org/apache/ignite/schema/Demo.java
@@ -18,13 +18,13 @@
 package org.apache.ignite.schema;
 
 import javax.cache.Cache;
-import javax.cache.configuration.Factory;
 import org.apache.ignite.Ignite;
 import org.apache.ignite.IgniteCache;
 import org.apache.ignite.IgniteException;
 import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.store.CacheStore;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStore;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
+import org.apache.ignite.cache.store.jdbc.dialect.H2Dialect;
 import org.apache.ignite.configuration.CacheConfiguration;
 import org.apache.ignite.transactions.Transaction;
 import org.h2.jdbcx.JdbcConnectionPool;
@@ -38,16 +38,14 @@ import org.h2.jdbcx.JdbcConnectionPool;
  */
 public class Demo {
     /**
-     * Constructs and returns a fully configured instance of a {@link CacheJdbcPojoStore}.
+     * Constructs and returns a fully configured instance of a {@link CacheJdbcPojoStoreFactory}.
      */
-    private static class H2DemoStoreFactory<K, V> implements Factory<CacheStore<K, V>> {
-        /** {@inheritDoc} */
-        @Override public CacheStore<K, V> create() {
-            CacheJdbcPojoStore<K, V> store = new CacheJdbcPojoStore<>();
+    private static class H2DemoStoreFactory<K, V> extends CacheJdbcPojoStoreFactory<K, V> {
+        /** Default constructor. */
+        H2DemoStoreFactory() {
+            setDialect(new H2Dialect());
 
-            store.setDataSource(JdbcConnectionPool.create("jdbc:h2:tcp://localhost/~/schema-import/demo", "sa", ""));
-
-            return store;
+            setDataSource(JdbcConnectionPool.create("jdbc:h2:tcp://localhost/~/schema-import/demo", "sa", ""));
         }
     }
 
@@ -144,4 +142,4 @@ public class Demo {
 
         System.out.println(">>> Updated person: " + cache.get(key));
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/QueryIndex.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/QueryIndex.java b/modules/core/src/main/java/org/apache/ignite/cache/QueryIndex.java
index f12044d..af11999 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/QueryIndex.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/QueryIndex.java
@@ -50,20 +50,33 @@ public class QueryIndex implements Serializable {
     /**
      * Creates single-field sorted ascending index.
      *
-     * @param name Field name.
+     * @param field Field name.
      */
-    public QueryIndex(String name) {
-        this(name, QueryIndexType.SORTED, true);
+    public QueryIndex(String field) {
+        this(field, QueryIndexType.SORTED, true);
     }
 
     /**
      * Creates single-field sorted index.
      *
-     * @param name Field name.
+     * @param field Field name.
      * @param asc Ascending flag.
      */
-    public QueryIndex(String name, boolean asc) {
-        this(name, QueryIndexType.SORTED, asc);
+    public QueryIndex(String field, boolean asc) {
+        this(field, QueryIndexType.SORTED, asc);
+    }
+
+    /**
+     * Creates single-field sorted index.
+     *
+     * @param field Field name.
+     * @param asc Ascending flag.
+     * @param name Index name.
+     */
+    public QueryIndex(String field, boolean asc, String name) {
+        this(field, QueryIndexType.SORTED, asc);
+
+        this.name = name;
     }
 
     /**
@@ -71,14 +84,20 @@ public class QueryIndex implements Serializable {
      * If index is sorted, then ascending sorting is used by default.
      * To specify sort order, use the next method.
      * This constructor should also have a corresponding setter method.
+     *
+     * @param field Field name.
+     * @param type Index type.
      */
     public QueryIndex(String field, QueryIndexType type) {
         this(Arrays.asList(field), type);
     }
 
     /**
-     * Creates index for one field. The last boolean parameter
-     * is ignored for non-sorted indexes.
+     * Creates index for one field. The last boolean parameter is ignored for non-sorted indexes.
+     *
+     * @param field Field name.
+     * @param type Index type.
+     * @param asc Ascending flag.
      */
     public QueryIndex(String field, QueryIndexType type, boolean asc) {
         fields = new LinkedHashMap<>();
@@ -88,6 +107,22 @@ public class QueryIndex implements Serializable {
     }
 
     /**
+     * Creates index for one field. The last boolean parameter is ignored for non-sorted indexes.
+     *
+     * @param field Field name.
+     * @param type Index type.
+     * @param asc Ascending flag.
+     * @param name Index name.
+     */
+    public QueryIndex(String field, QueryIndexType type, boolean asc, String name) {
+        fields = new LinkedHashMap<>();
+        fields.put(field, asc);
+
+        this.type = type;
+        this.name = name;
+    }
+
+    /**
      * Creates index for a collection of fields. If index is sorted, fields will be sorted in
      * ascending order.
      *
@@ -189,4 +224,4 @@ public class QueryIndex implements Serializable {
     public void setIndexType(QueryIndexType type) {
         this.type = type;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
index 6e19234..6dc413b 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
@@ -30,6 +30,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.Callable;
@@ -66,6 +68,7 @@ import org.apache.ignite.internal.util.typedef.internal.U;
 import org.apache.ignite.lang.IgniteBiInClosure;
 import org.apache.ignite.lang.IgnitePredicate;
 import org.apache.ignite.lifecycle.LifecycleAware;
+import org.apache.ignite.marshaller.portable.BinaryMarshaller;
 import org.apache.ignite.resources.CacheStoreSessionResource;
 import org.apache.ignite.resources.IgniteInstanceResource;
 import org.apache.ignite.resources.LoggerResource;
@@ -75,6 +78,10 @@ import org.jetbrains.annotations.Nullable;
 import static java.sql.Statement.EXECUTE_FAILED;
 import static java.sql.Statement.SUCCESS_NO_INFO;
 
+import static org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory.DFLT_BATCH_SIZE;
+import static org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory.DFLT_WRITE_ATTEMPTS;
+import static org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory.DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD;
+
 /**
  * Implementation of {@link CacheStore} backed by JDBC.
  * <p>
@@ -99,35 +106,43 @@ import static java.sql.Statement.SUCCESS_NO_INFO;
  * <h2 class="header">Java Example</h2>
  * <pre name="code" class="java">
  *    ...
- *    CacheConfiguration ccfg = new CacheConfiguration&lt;&gt;();
- *
- *    // Configure cache store.
- *    ccfg.setCacheStoreFactory(new FactoryBuilder.SingletonFactory(ConfigurationSnippet.store()));
+ *    // Create store factory.
+ *    CacheJdbcPojoStoreFactory storeFactory = new CacheJdbcPojoStoreFactory();
+ *    storeFactory.setDataSourceBean("your_data_source_name");
+ *    storeFactory.setDialect(new H2Dialect());
+ *    storeFactory.setTypes(array_with_your_types);
+ *    ...
+ *    ccfg.setCacheStoreFactory(storeFactory);
  *    ccfg.setReadThrough(true);
  *    ccfg.setWriteThrough(true);
  *
- *    // Configure cache types metadata.
- *    ccfg.setTypeMetadata(ConfigurationSnippet.typeMetadata());
- *
  *    cfg.setCacheConfiguration(ccfg);
  *    ...
  * </pre>
  */
 public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>, LifecycleAware {
-    /** Max attempt write count. */
-    protected static final int MAX_ATTEMPT_WRITE_COUNT = 2;
-
-    /** Default batch size for put and remove operations. */
-    protected static final int DFLT_BATCH_SIZE = 512;
-
-    /** Default batch size for put and remove operations. */
-    protected static final int DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD = 512;
-
     /** Connection attribute property name. */
     protected static final String ATTR_CONN_PROP = "JDBC_STORE_CONNECTION";
 
-    /** Empty column value. */
-    protected static final Object[] EMPTY_COLUMN_VALUE = new Object[] { null };
+    /** Built-in Java type names. */
+    protected static final Collection<String> BUILT_IN_TYPES = new HashSet<>();
+
+    static {
+        BUILT_IN_TYPES.add("java.math.BigDecimal");
+        BUILT_IN_TYPES.add("java.lang.Boolean");
+        BUILT_IN_TYPES.add("java.lang.Byte");
+        BUILT_IN_TYPES.add("java.lang.Character");
+        BUILT_IN_TYPES.add("java.lang.Double");
+        BUILT_IN_TYPES.add("java.util.Date");
+        BUILT_IN_TYPES.add("java.sql.Date");
+        BUILT_IN_TYPES.add("java.lang.Float");
+        BUILT_IN_TYPES.add("java.lang.Integer");
+        BUILT_IN_TYPES.add("java.lang.Long");
+        BUILT_IN_TYPES.add("java.lang.Short");
+        BUILT_IN_TYPES.add("java.lang.String");
+        BUILT_IN_TYPES.add("java.sql.Timestamp");
+        BUILT_IN_TYPES.add("java.util.UUID");
+    }
 
     /** Auto-injected store session. */
     @CacheStoreSessionResource
@@ -135,7 +150,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
     /** Auto injected ignite instance. */
     @IgniteInstanceResource
-    private Ignite ignite;
+    protected Ignite ignite;
 
     /** Auto-injected logger instance. */
     @LoggerResource
@@ -151,30 +166,40 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
     /** Cache with entry mapping description. (cache name, (key id, mapping description)). */
     protected volatile Map<String, Map<Object, EntryMapping>> cacheMappings = Collections.emptyMap();
 
+    /** Maximum batch size for writeAll and deleteAll operations. */
+    private int batchSize = DFLT_BATCH_SIZE;
+
     /** Database dialect. */
     protected JdbcDialect dialect;
 
-    /** Max workers thread count. These threads are responsible for load cache. */
-    private int maxPoolSz = Runtime.getRuntime().availableProcessors();
+    /** Maximum write attempts in case of database error. */
+    private int maxWrtAttempts = DFLT_WRITE_ATTEMPTS;
 
-    /** Maximum batch size for writeAll and deleteAll operations. */
-    private int batchSz = DFLT_BATCH_SIZE;
+    /** Max workers thread count. These threads are responsible for load cache. */
+    private int maxPoolSize = Runtime.getRuntime().availableProcessors();
 
     /** Parallel load cache minimum threshold. If {@code 0} then load sequentially. */
     private int parallelLoadCacheMinThreshold = DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD;
 
+    /** Types that store could process. */
+    private JdbcType[] types;
+
+    /** Hash calculator.  */
+    protected JdbcTypeHasher hasher = JdbcTypeDefaultHasher.INSTANCE;
+
     /**
      * Get field value from object for use as query parameter.
      *
      * @param cacheName Cache name.
      * @param typeName Type name.
+     * @param typeKind Type kind.
      * @param fieldName Field name.
      * @param obj Cache object.
      * @return Field value from object.
      * @throws CacheException in case of error.
      */
-    @Nullable protected abstract Object extractParameter(@Nullable String cacheName, String typeName, String fieldName,
-        Object obj) throws CacheException;
+    @Nullable protected abstract Object extractParameter(@Nullable String cacheName, String typeName, TypeKind typeKind,
+        String fieldName, Object obj) throws CacheException;
 
     /**
      * Construct object from query result.
@@ -182,33 +207,36 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * @param <R> Type of result object.
      * @param cacheName Cache name.
      * @param typeName Type name.
-     * @param fields Fields descriptors.
+     * @param typeKind Type kind.
+     * @param flds Fields descriptors.
+     * @param hashFlds Field names for hash code calculation.
      * @param loadColIdxs Select query columns index.
      * @param rs ResultSet.
      * @return Constructed object.
      * @throws CacheLoaderException If failed to construct cache object.
      */
-    protected abstract <R> R buildObject(@Nullable String cacheName, String typeName,
-        Collection<CacheTypeFieldMetadata> fields, Map<String, Integer> loadColIdxs, ResultSet rs)
+    protected abstract <R> R buildObject(@Nullable String cacheName, String typeName, TypeKind typeKind,
+        JdbcTypeField[] flds, Collection<String> hashFlds, Map<String, Integer> loadColIdxs, ResultSet rs)
         throws CacheLoaderException;
 
     /**
-     * Extract key type id from key object.
+     * Calculate type ID for object.
      *
-     * @param key Key object.
-     * @return Key type id.
-     * @throws CacheException If failed to get type key id from object.
+     * @param obj Object to calculate type ID for.
+     * @return Type ID.
+     * @throws CacheException If failed to calculate type ID for given object.
      */
-    protected abstract Object keyTypeId(Object key) throws CacheException;
+    protected abstract Object typeIdForObject(Object obj) throws CacheException;
 
     /**
-     * Extract key type id from key class name.
+     * Calculate type ID for given type name.
      *
-     * @param type String description of key type.
-     * @return Key type id.
-     * @throws CacheException If failed to get type key id from object.
+     * @param kind Type kind (POJO or binary) to calculate type ID for.
+     * @param typeName String description of type name.
+     * @return Type ID.
+     * @throws CacheException If failed to get type ID for given type name.
      */
-    protected abstract Object keyTypeId(String type) throws CacheException;
+    protected abstract Object typeIdForTypeName(TypeKind kind, String typeName) throws CacheException;
 
     /**
      * Prepare internal store specific builders for provided types metadata.
@@ -217,7 +245,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * @param types Collection of types.
      * @throws CacheException If failed to prepare internal builders for types.
      */
-    protected abstract void prepareBuilders(@Nullable String cacheName, Collection<CacheTypeMetadata> types)
+    protected abstract void prepareBuilders(@Nullable String cacheName, Collection<JdbcType> types)
         throws CacheException;
 
     /**
@@ -480,23 +508,23 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                         ? em.loadCacheQry
                         : em.loadCacheRangeQuery(lowerBound != null, upperBound != null));
 
-                    int ix = 1;
+                    int idx = 1;
 
                     if (lowerBound != null)
                         for (int i = lowerBound.length; i > 0; i--)
                             for (int j = 0; j < i; j++)
-                                stmt.setObject(ix++, lowerBound[j]);
+                                stmt.setObject(idx++, lowerBound[j]);
 
                     if (upperBound != null)
                         for (int i = upperBound.length; i > 0; i--)
                             for (int j = 0; j < i; j++)
-                                stmt.setObject(ix++, upperBound[j]);
+                                stmt.setObject(idx++, upperBound[j]);
 
                     ResultSet rs = stmt.executeQuery();
 
                     while (rs.next()) {
-                        K key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), em.loadColIdxs, rs);
-                        V val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs);
+                        K key = buildObject(em.cacheName, em.keyType(), em.keyKind(), em.keyColumns(), em.keyCols, em.loadColIdxs, rs);
+                        V val = buildObject(em.cacheName, em.valueType(), em.valueKind(), em.valueColumns(), null, em.loadColIdxs, rs);
 
                         clo.apply(key, val);
                     }
@@ -527,58 +555,86 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
     }
 
     /**
-     * Object is a simple type.
+     * Checks if type is configured properly.
      *
-     * @param cls Class.
-     * @return {@code True} if object is a simple type.
-     */
-    protected static boolean simpleType(Class<?> cls) {
-        return (Number.class.isAssignableFrom(cls) || String.class.isAssignableFrom(cls) ||
-            java.util.Date.class.isAssignableFrom(cls) || Boolean.class.isAssignableFrom(cls) ||
-            UUID.class.isAssignableFrom(cls));
-    }
-
-    /**
      * @param cacheName Cache name to check mapping for.
-     * @param clsName Class name.
-     * @param fields Fields descriptors.
-     * @throws CacheException If failed to check type metadata.
+     * @param typeName Type name.
+     * @param flds Fields descriptors.
+     * @throws CacheException If failed to check type configuration.
      */
-    private static void checkMapping(@Nullable String cacheName, String clsName,
-        Collection<CacheTypeFieldMetadata> fields) throws CacheException {
+    private void checkTypeConfiguration(@Nullable String cacheName, TypeKind kind, String typeName,
+        JdbcTypeField[] flds) throws CacheException {
         try {
-            Class<?> cls = Class.forName(clsName);
-
-            if (simpleType(cls)) {
-                if (fields.size() != 1)
-                    throw new CacheException("More than one field for simple type [cache name=" + cacheName
-                        + ", type=" + clsName + " ]");
+            if (kind == TypeKind.BUILT_IN) {
+                if (flds.length != 1)
+                    throw new CacheException("More than one field for built in type [cache=" +  U.maskName(cacheName) +
+                        ", type=" + typeName + " ]");
 
-                CacheTypeFieldMetadata field = F.first(fields);
+                JdbcTypeField field = flds[0];
 
-                if (field.getDatabaseName() == null)
-                    throw new CacheException("Missing database name in mapping description [cache name=" + cacheName
-                        + ", type=" + clsName + " ]");
+                if (field.getDatabaseFieldName() == null)
+                    throw new CacheException("Missing database name in mapping description [cache=" +
+                        U.maskName(cacheName) + ", type=" + typeName + " ]");
 
-                field.setJavaType(cls);
+                field.setJavaFieldType(Class.forName(typeName));
             }
             else
-                for (CacheTypeFieldMetadata field : fields) {
-                    if (field.getDatabaseName() == null)
-                        throw new CacheException("Missing database name in mapping description [cache name=" + cacheName
-                            + ", type=" + clsName + " ]");
-
-                    if (field.getJavaName() == null)
-                        throw new CacheException("Missing field name in mapping description [cache name=" + cacheName
-                            + ", type=" + clsName + " ]");
-
-                    if (field.getJavaType() == null)
-                        throw new CacheException("Missing field type in mapping description [cache name=" + cacheName
-                            + ", type=" + clsName + " ]");
+                for (JdbcTypeField field : flds) {
+                    if (field.getDatabaseFieldName() == null)
+                        throw new CacheException("Missing database name in mapping description [cache=" +
+                            U.maskName(cacheName) + ", type=" + typeName + " ]");
+
+                    if (field.getJavaFieldName() == null)
+                        throw new CacheException("Missing field name in mapping description [cache=" +
+                            U.maskName(cacheName) + ", type=" + typeName + " ]");
+
+                    if (field.getJavaFieldType() == null)
+                        throw new CacheException("Missing field type in mapping description [cache=" +
+                            U.maskName(cacheName) + ", type=" + typeName + " ]");
                 }
         }
         catch (ClassNotFoundException e) {
-            throw new CacheException("Failed to find class: " + clsName, e);
+            throw new CacheException("Failed to find class: " + typeName, e);
+        }
+    }
+
+    /**
+     * For backward compatibility, translates old field type descriptors to the new format.
+     *
+     * @param oldFlds Fields in old format.
+     * @return Fields in new format.
+     */
+    @Deprecated
+    private JdbcTypeField[] translateFields(Collection<CacheTypeFieldMetadata> oldFlds) {
+        JdbcTypeField[] newFlds = new JdbcTypeField[oldFlds.size()];
+
+        int idx = 0;
+
+        for (CacheTypeFieldMetadata oldField : oldFlds) {
+            newFlds[idx] = new JdbcTypeField(oldField.getDatabaseType(), oldField.getDatabaseName(),
+                oldField.getJavaType(), oldField.getJavaName());
+
+            idx++;
+        }
+
+        return newFlds;
+    }
+
+    /**
+     * @param type Type name to check.
+     * @return Type kind for the given type name.
+     */
+    protected TypeKind kindForName(String type) {
+        if (BUILT_IN_TYPES.contains(type))
+            return TypeKind.BUILT_IN;
+
+        try {
+            Class.forName(type);
+
+            return TypeKind.POJO;
+        }
+        catch(ClassNotFoundException ignored) {
+            return TypeKind.BINARY;
         }
     }
 
@@ -587,46 +643,104 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * @return Type mappings for specified cache name.
      * @throws CacheException If failed to initialize cache mappings.
      */
-    private Map<Object, EntryMapping> cacheMappings(@Nullable String cacheName) throws CacheException {
+    private Map<Object, EntryMapping> getOrCreateCacheMappings(@Nullable String cacheName) throws CacheException {
         Map<Object, EntryMapping> entryMappings = cacheMappings.get(cacheName);
 
         if (entryMappings != null)
             return entryMappings;
 
         cacheMappingsLock.lock();
-
         try {
             entryMappings = cacheMappings.get(cacheName);
 
             if (entryMappings != null)
                 return entryMappings;
 
-            CacheConfiguration ccfg = ignite().cache(cacheName).getConfiguration(CacheConfiguration.class);
+            // If no types configured, check CacheTypeMetadata for backward compatibility.
+            if (types == null) {
+                CacheConfiguration ccfg = ignite.cache(cacheName).getConfiguration(CacheConfiguration.class);
+
+                Collection<CacheTypeMetadata> oldTypes = ccfg.getTypeMetadata();
+
+                types = new JdbcType[oldTypes.size()];
+
+                int idx = 0;
+
+                for (CacheTypeMetadata oldType : oldTypes) {
+                    JdbcType newType = new JdbcType();
 
-            Collection<CacheTypeMetadata> types = ccfg.getTypeMetadata();
+                    newType.setCacheName(cacheName);
 
-            entryMappings = U.newHashMap(types.size());
+                    newType.setDatabaseSchema(oldType.getDatabaseSchema());
+                    newType.setDatabaseTable(oldType.getDatabaseTable());
 
-            for (CacheTypeMetadata type : types) {
-                Object keyTypeId = keyTypeId(type.getKeyType());
+                    newType.setKeyType(oldType.getKeyType());
+                    newType.setKeyFields(translateFields(oldType.getKeyFields()));
 
-                if (entryMappings.containsKey(keyTypeId))
-                    throw new CacheException("Key type must be unique in type metadata [cache name=" + cacheName +
-                        ", key type=" + type.getKeyType() + "]");
+                    newType.setValueType(oldType.getValueType());
+                    newType.setValueFields(translateFields(oldType.getValueFields()));
 
-                checkMapping(cacheName, type.getKeyType(), type.getKeyFields());
-                checkMapping(cacheName, type.getValueType(), type.getValueFields());
+                    types[idx] = newType;
 
-                entryMappings.put(keyTypeId(type.getKeyType()), new EntryMapping(cacheName, dialect, type));
+                    idx++;
+                }
             }
 
-            Map<String, Map<Object, EntryMapping>> mappings = new HashMap<>(cacheMappings);
+            List<JdbcType> cacheTypes = new ArrayList<>(types.length);
+
+            for (JdbcType type : types)
+                if ((cacheName != null && cacheName.equals(type.getCacheName())) ||
+                    (cacheName == null && type.getCacheName() == null))
+                    cacheTypes.add(type);
+
+            entryMappings = U.newHashMap(cacheTypes.size());
+
+            if (!cacheTypes.isEmpty()) {
+                boolean binarySupported = ignite.configuration().getMarshaller() instanceof BinaryMarshaller;
+
+                for (JdbcType type : cacheTypes) {
+                    String keyType = type.getKeyType();
+                    String valType = type.getValueType();
+
+                    TypeKind keyKind = kindForName(keyType);
+
+                    if (!binarySupported && keyKind == TypeKind.BINARY)
+                        throw new CacheException("Key type has no class [cache=" + U.maskName(cacheName) +
+                            ", type=" + keyType + "]");
+
+                    checkTypeConfiguration(cacheName, keyKind, keyType, type.getKeyFields());
+
+                    Object keyTypeId = typeIdForTypeName(keyKind, keyType);
 
-            mappings.put(cacheName, entryMappings);
+                    if (entryMappings.containsKey(keyTypeId))
+                        throw new CacheException("Key type must be unique in type metadata [cache=" +
+                            U.maskName(cacheName) + ", type=" + keyType + "]");
 
-            prepareBuilders(cacheName, types);
+                    TypeKind valKind = kindForName(valType);
 
-            cacheMappings = mappings;
+                    checkTypeConfiguration(cacheName, valKind, valType, type.getValueFields());
+
+                    entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, keyKind, valKind));
+
+                    // Add one more binding to binary typeId for POJOs,
+                    // because object could be passed to store in binary format.
+                    if (binarySupported && keyKind == TypeKind.POJO) {
+                        keyTypeId = typeIdForTypeName(TypeKind.BINARY, keyType);
+
+                        valKind = valKind == TypeKind.POJO ? TypeKind.BINARY : valKind;
+
+                        entryMappings.put(keyTypeId, new EntryMapping(cacheName, dialect, type, TypeKind.BINARY, valKind));
+                    }
+                }
+
+                Map<String, Map<Object, EntryMapping>> mappings = new HashMap<>(cacheMappings);
+
+                mappings.put(cacheName, entryMappings);
+
+                prepareBuilders(cacheName, cacheTypes);
+
+                cacheMappings = mappings;
+            }
 
             return entryMappings;
         }
@@ -637,19 +751,21 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
     /**
      * @param cacheName Cache name.
-     * @param keyTypeId Key type id.
-     * @param key Key object.
+     * @param typeId Type id.
      * @return Entry mapping.
      * @throws CacheException If mapping for key was not found.
      */
-    private EntryMapping entryMapping(String cacheName, Object keyTypeId, Object key) throws CacheException {
-        EntryMapping em = cacheMappings(cacheName).get(keyTypeId);
+    private EntryMapping entryMapping(String cacheName, Object typeId) throws CacheException {
+        Map<Object, EntryMapping> mappings = getOrCreateCacheMappings(cacheName);
+
+        EntryMapping em = mappings.get(typeId);
 
         if (em == null) {
             String maskedCacheName = U.maskName(cacheName);
 
-            throw new CacheException("Failed to find mapping description [key=" + key +
-                ", cache=" + maskedCacheName + "]. Please configure CacheTypeMetadata to associate '" + maskedCacheName + "' with JdbcPojoStore.");
+            throw new CacheException("Failed to find mapping description [cache=" + maskedCacheName +
+                ", typeId=" + typeId + "]. Please configure JdbcType to associate cache '" + maskedCacheName +
+                "' with JdbcPojoStore.");
         }
 
         return em;
@@ -663,34 +779,37 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         String cacheName = session().cacheName();
 
         try {
-            pool = Executors.newFixedThreadPool(maxPoolSz);
+            pool = Executors.newFixedThreadPool(maxPoolSize);
 
             Collection<Future<?>> futs = new ArrayList<>();
 
+            Map<Object, EntryMapping> mappings = getOrCreateCacheMappings(cacheName);
+
             if (args != null && args.length > 0) {
                 if (args.length % 2 != 0)
                     throw new CacheLoaderException("Expected even number of arguments, but found: " + args.length);
 
                 if (log.isDebugEnabled())
-                    log.debug("Start loading entries from db using user queries from arguments");
+                    log.debug("Start loading entries from db using user queries from arguments...");
 
                 for (int i = 0; i < args.length; i += 2) {
                     String keyType = args[i].toString();
 
                     String selQry = args[i + 1].toString();
 
-                    EntryMapping em = entryMapping(cacheName, keyTypeId(keyType), keyType);
+                    EntryMapping em = entryMapping(cacheName, typeIdForTypeName(kindForName(keyType), keyType));
 
                     futs.add(pool.submit(new LoadCacheCustomQueryWorker<>(em, selQry, clo)));
                 }
             }
             else {
-                Collection<EntryMapping> entryMappings = cacheMappings(session().cacheName()).values();
+                Collection<EntryMapping> entryMappings = mappings.values();
 
                 for (EntryMapping em : entryMappings) {
                     if (parallelLoadCacheMinThreshold > 0) {
-                        log.debug("Multithread loading entries from db [cache name=" + cacheName +
-                            ", key type=" + em.keyType() + " ]");
+                        if (log.isDebugEnabled())
+                            log.debug("Multithread loading entries from db [cache=" +  U.maskName(cacheName) +
+                                ", keyType=" + em.keyType() + " ]");
 
                         Connection conn = null;
 
@@ -738,8 +857,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                     }
                     else {
                         if (log.isDebugEnabled())
-                            log.debug("Single thread loading entries from db [cache name=" + cacheName +
-                                ", key type=" + em.keyType() + " ]");
+                            log.debug("Single thread loading entries from db [cache=" +  U.maskName(cacheName) +
+                                ", keyType=" + em.keyType() + " ]");
 
                         futs.add(pool.submit(loadCacheFull(em, clo)));
                     }
@@ -750,10 +869,10 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 U.get(fut);
 
             if (log.isDebugEnabled())
-                log.debug("Cache loaded from db: " + cacheName);
+                log.debug("Cache loaded from db: " +  U.maskName(cacheName));
         }
         catch (IgniteCheckedException e) {
-            throw new CacheLoaderException("Failed to load cache: " + cacheName, e.getCause());
+            throw new CacheLoaderException("Failed to load cache: " + U.maskName(cacheName), e.getCause());
         }
         finally {
             U.shutdownNow(getClass(), pool, log);
@@ -764,7 +883,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
     @Nullable @Override public V load(K key) throws CacheLoaderException {
         assert key != null;
 
-        EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key);
+        EntryMapping em = entryMapping(session().cacheName(), typeIdForObject(key));
 
         if (log.isDebugEnabled())
             log.debug("Load value from db [table= " + em.fullTableName() + ", key=" + key + "]");
@@ -783,7 +902,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
             ResultSet rs = stmt.executeQuery();
 
             if (rs.next())
-                return buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs);
+                return buildObject(em.cacheName, em.valueType(), em.valueKind(), em.valueColumns(), null, em.loadColIdxs, rs);
         }
         catch (SQLException e) {
             throw new CacheLoaderException("Failed to load object [table=" + em.fullTableName() +
@@ -807,14 +926,14 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
             String cacheName = session().cacheName();
 
-            Map<Object, LoadWorker<K, V>> workers = U.newHashMap(cacheMappings(cacheName).size());
+            Map<Object, LoadWorker<K, V>> workers = U.newHashMap(getOrCreateCacheMappings(cacheName).size());
 
             Map<K, V> res = new HashMap<>();
 
             for (K key : keys) {
-                Object keyTypeId = keyTypeId(key);
+                Object keyTypeId = typeIdForObject(key);
 
-                EntryMapping em = entryMapping(cacheName, keyTypeId, key);
+                EntryMapping em = entryMapping(cacheName, keyTypeId);
 
                 LoadWorker<K, V> worker = workers.get(keyTypeId);
 
@@ -852,7 +971,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         try {
             CacheWriterException we = null;
 
-            for (int attempt = 0; attempt < MAX_ATTEMPT_WRITE_COUNT; attempt++) {
+            for (int attempt = 0; attempt < maxWrtAttempts; attempt++) {
                 int paramIdx = fillValueParameters(updStmt, 1, em, entry.getValue());
 
                 fillKeyParameters(updStmt, paramIdx, em, entry.getKey());
@@ -921,7 +1040,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
         K key = entry.getKey();
 
-        EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key);
+        EntryMapping em = entryMapping(session().cacheName(), typeIdForObject(key));
 
         if (log.isDebugEnabled())
             log.debug("Start write entry to database [table=" + em.fullTableName() + ", entry=" + entry + "]");
@@ -937,9 +1056,9 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 try {
                     stmt = conn.prepareStatement(em.mergeQry);
 
-                    int i = fillKeyParameters(stmt, em, key);
+                    int idx = fillKeyParameters(stmt, em, key);
 
-                    fillValueParameters(stmt, i, em, entry.getValue());
+                    fillValueParameters(stmt, idx, em, entry.getValue());
 
                     int updCnt = stmt.executeUpdate();
 
@@ -1010,15 +1129,15 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                     for (Cache.Entry<? extends K, ? extends V> entry : entries) {
                         K key = entry.getKey();
 
-                        Object keyTypeId = keyTypeId(key);
+                        Object keyTypeId = typeIdForObject(key);
 
-                        em = entryMapping(cacheName, keyTypeId, key);
+                        em = entryMapping(cacheName, keyTypeId);
 
                         if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) {
                             if (mergeStmt != null) {
                                 if (log.isDebugEnabled())
-                                    log.debug("Write entries to db [cache name=" + cacheName +
-                                        ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                                    log.debug("Write entries to db [cache=" +  U.maskName(cacheName) +
+                                        ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                                 executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
 
@@ -1034,16 +1153,16 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                             prepared = 0;
                         }
 
-                        int i = fillKeyParameters(mergeStmt, em, key);
+                        int idx = fillKeyParameters(mergeStmt, em, key);
 
-                        fillValueParameters(mergeStmt, i, em, entry.getValue());
+                        fillValueParameters(mergeStmt, idx, em, entry.getValue());
 
                         mergeStmt.addBatch();
 
-                        if (++prepared % batchSz == 0) {
+                        if (++prepared % batchSize == 0) {
                             if (log.isDebugEnabled())
-                                log.debug("Write entries to db [cache name=" + cacheName +
-                                    ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                                log.debug("Write entries to db [cache=" +  U.maskName(cacheName) +
+                                    ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                             executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
 
@@ -1053,10 +1172,10 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                         }
                     }
 
-                    if (mergeStmt != null && prepared % batchSz != 0) {
+                    if (mergeStmt != null && prepared % batchSize != 0) {
                         if (log.isDebugEnabled())
-                            log.debug("Write entries to db [cache name=" + cacheName +
-                                ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                            log.debug("Write entries to db [cache=" +  U.maskName(cacheName) +
+                                ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                         executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
 
@@ -1067,8 +1186,9 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 }
             }
             else {
-                log.debug("Write entries to db one by one using update and insert statements [cache name=" +
-                    cacheName + ", count=" + entries.size() + "]");
+                if (log.isDebugEnabled())
+                    log.debug("Write entries to db one by one using update and insert statements [cache=" +
+                        U.maskName(cacheName) + ", cnt=" + entries.size() + "]");
 
                 PreparedStatement insStmt = null;
 
@@ -1078,9 +1198,9 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                     for (Cache.Entry<? extends K, ? extends V> entry : entries) {
                         K key = entry.getKey();
 
-                        Object keyTypeId = keyTypeId(key);
+                        Object keyTypeId = typeIdForObject(key);
 
-                        EntryMapping em = entryMapping(cacheName, keyTypeId, key);
+                        EntryMapping em = entryMapping(cacheName, keyTypeId);
 
                         if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) {
                             U.closeQuiet(insStmt);
@@ -1116,7 +1236,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
     @Override public void delete(Object key) throws CacheWriterException {
         assert key != null;
 
-        EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key);
+        EntryMapping em = entryMapping(session().cacheName(), typeIdForObject(key));
 
         if (log.isDebugEnabled())
             log.debug("Remove value from db [table=" + em.fullTableName() + ", key=" + key + "]");
@@ -1220,9 +1340,9 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
             int fromIdx = 0, prepared = 0;
 
             for (Object key : keys) {
-                Object keyTypeId = keyTypeId(key);
+                Object keyTypeId = typeIdForObject(key);
 
-                em = entryMapping(cacheName, keyTypeId, key);
+                em = entryMapping(cacheName, keyTypeId);
 
                 if (delStmt == null) {
                     delStmt = conn.prepareStatement(em.remQry);
@@ -1232,8 +1352,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
                 if (!currKeyTypeId.equals(keyTypeId)) {
                     if (log.isDebugEnabled())
-                        log.debug("Delete entries from db [cache name=" + cacheName +
-                            ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                        log.debug("Delete entries from db [cache=" +  U.maskName(cacheName) +
+                            ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                     executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys);
 
@@ -1248,10 +1368,10 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
                 delStmt.addBatch();
 
-                if (++prepared % batchSz == 0) {
+                if (++prepared % batchSize == 0) {
                     if (log.isDebugEnabled())
-                        log.debug("Delete entries from db [cache name=" + cacheName +
-                            ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                        log.debug("Delete entries from db [cache=" +  U.maskName(cacheName) +
+                            ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                     executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys);
 
@@ -1261,10 +1381,10 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 }
             }
 
-            if (delStmt != null && prepared % batchSz != 0) {
+            if (delStmt != null && prepared % batchSize != 0) {
                 if (log.isDebugEnabled())
-                    log.debug("Delete entries from db [cache name=" + cacheName +
-                        ", key type=" + em.keyType() + ", count=" + prepared + "]");
+                    log.debug("Delete entries from db [cache=" +  U.maskName(cacheName) +
+                        ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
 
                 executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys);
             }
@@ -1281,17 +1401,17 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * Sets the value of the designated parameter using the given object.
      *
      * @param stmt Prepare statement.
-     * @param i Index for parameters.
+     * @param idx Index for parameters.
      * @param field Field descriptor.
      * @param fieldVal Field value.
      * @throws CacheException If failed to set statement parameter.
      */
-    protected void fillParameter(PreparedStatement stmt, int i, CacheTypeFieldMetadata field, @Nullable Object fieldVal)
+    protected void fillParameter(PreparedStatement stmt, int idx, JdbcTypeField field, @Nullable Object fieldVal)
         throws CacheException {
         try {
             if (fieldVal != null) {
-                if (field.getJavaType() == UUID.class) {
-                    switch (field.getDatabaseType()) {
+                if (field.getJavaFieldType() == UUID.class) {
+                    switch (field.getDatabaseFieldType()) {
                         case Types.BINARY:
                             fieldVal = U.uuidToBytes((UUID)fieldVal);
 
@@ -1304,13 +1424,13 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                     }
                 }
 
-                stmt.setObject(i, fieldVal);
+                stmt.setObject(idx, fieldVal);
             }
             else
-                stmt.setNull(i, field.getDatabaseType());
+                stmt.setNull(idx, field.getDatabaseFieldType());
         }
         catch (SQLException e) {
-            throw new CacheException("Failed to set statement parameter name: " + field.getDatabaseName(), e);
+            throw new CacheException("Failed to set statement parameter name: " + field.getDatabaseFieldName(), e);
         }
     }
 
@@ -1324,8 +1444,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      */
     protected int fillKeyParameters(PreparedStatement stmt, int idx, EntryMapping em,
         Object key) throws CacheException {
-        for (CacheTypeFieldMetadata field : em.keyColumns()) {
-            Object fieldVal = extractParameter(em.cacheName, em.keyType(), field.getJavaName(), key);
+        for (JdbcTypeField field : em.keyColumns()) {
+            Object fieldVal = extractParameter(em.cacheName, em.keyType(), em.keyKind(), field.getJavaFieldName(), key);
 
             fillParameter(stmt, idx++, field, fieldVal);
         }
@@ -1354,8 +1474,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      */
     protected int fillValueParameters(PreparedStatement stmt, int idx, EntryMapping em, Object val)
         throws CacheWriterException {
-        for (CacheTypeFieldMetadata field : em.uniqValFields) {
-            Object fieldVal = extractParameter(em.cacheName, em.valueType(), field.getJavaName(), val);
+        for (JdbcTypeField field : em.uniqValFlds) {
+            Object fieldVal = extractParameter(em.cacheName, em.valueType(), em.valueKind(), field.getJavaFieldName(), val);
 
             fillParameter(stmt, idx++, field, fieldVal);
         }
@@ -1401,16 +1521,70 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * @return Max workers thread count.
      */
     public int getMaximumPoolSize() {
-        return maxPoolSz;
+        return maxPoolSize;
     }
 
     /**
      * Set Max workers thread count. These threads are responsible for execute query.
      *
-     * @param maxPoolSz Max workers thread count.
+     * @param maxPoolSize Max workers thread count.
      */
-    public void setMaximumPoolSize(int maxPoolSz) {
-        this.maxPoolSz = maxPoolSz;
+    public void setMaximumPoolSize(int maxPoolSize) {
+        this.maxPoolSize = maxPoolSize;
+    }
+
+    /**
+     * Gets maximum number of write attempts in case of database error.
+     *
+     * @return Maximum number of write attempts.
+     */
+    public int getMaximumWriteAttempts() {
+        return maxWrtAttempts;
+    }
+
+    /**
+     * Sets maximum number of write attempts in case of database error.
+     *
+     * @param maxWrtAttempts Number of write attempts.
+     */
+    public void setMaximumWriteAttempts(int maxWrtAttempts) {
+        this.maxWrtAttempts = maxWrtAttempts;
+    }
+
+    /**
+     * Gets types known by store.
+     *
+     * @return Types known by store.
+     */
+    public JdbcType[] getTypes() {
+        return types;
+    }
+
+    /**
+     * Sets types known by store.
+     *
+     * @param types Types that store should process.
+     */
+    public void setTypes(JdbcType... types) {
+        this.types = types;
+    }
+
+    /**
+     * Gets hash code calculator.
+     *
+     * @return Hash code calculator.
+     */
+    public JdbcTypeHasher getHasher() {
+        return hasher;
+    }
+
+    /**
+     * Sets hash code calculator.
+     *
+     * @param hasher Hash code calculator.
+     */
+    public void setHasher(JdbcTypeHasher hasher) {
+        this.hasher = hasher;
     }
 
     /**
@@ -1419,16 +1593,16 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
      * @return Maximum batch size.
      */
     public int getBatchSize() {
-        return batchSz;
+        return batchSize;
     }
 
     /**
      * Set maximum batch size for write and delete operations.
      *
-     * @param batchSz Maximum batch size.
+     * @param batchSize Maximum batch size.
      */
-    public void setBatchSize(int batchSz) {
-        this.batchSz = batchSz;
+    public void setBatchSize(int batchSize) {
+        this.batchSize = batchSize;
     }
 
     /**
@@ -1464,6 +1638,18 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
     }
 
     /**
+     * Type kind.
+     */
+    protected enum TypeKind {
+        /** Type is a Java built-in type, like {@link String}. */
+        BUILT_IN,
+        /** Class for this type is available. */
+        POJO,
+        /** Class for this type is not available. */
+        BINARY
+    }
+
+    /**
      * Entry mapping description.
      */
     protected static class EntryMapping {
@@ -1510,10 +1696,16 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         private final Map<String, Integer> loadColIdxs;
 
         /** Unique value fields. */
-        private final Collection<CacheTypeFieldMetadata> uniqValFields;
+        private final Collection<JdbcTypeField> uniqValFlds;
 
         /** Type metadata. */
-        private final CacheTypeMetadata typeMeta;
+        private final JdbcType typeMeta;
+
+        /** Key type kind. */
+        private final TypeKind keyKind;
+
+        /** Value type kind. */
+        private final TypeKind valKind;
 
         /** Full table name. */
         private final String fullTblName;
@@ -1523,22 +1715,27 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
          * @param dialect JDBC dialect.
          * @param typeMeta Type metadata.
          */
-        public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, CacheTypeMetadata typeMeta) {
+        public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, JdbcType typeMeta,
+            TypeKind keyKind, TypeKind valKind) {
             this.cacheName = cacheName;
 
             this.dialect = dialect;
 
             this.typeMeta = typeMeta;
 
-            Collection<CacheTypeFieldMetadata> keyFields = typeMeta.getKeyFields();
+            this.keyKind = keyKind;
+
+            this.valKind = valKind;
+
+            JdbcTypeField[] keyFields = typeMeta.getKeyFields();
 
-            Collection<CacheTypeFieldMetadata> valFields = typeMeta.getValueFields();
+            JdbcTypeField[] valFields = typeMeta.getValueFields();
 
-            keyCols = databaseColumns(keyFields);
+            keyCols = databaseColumns(F.asList(keyFields));
 
-            uniqValFields = F.view(valFields, new IgnitePredicate<CacheTypeFieldMetadata>() {
-                @Override public boolean apply(CacheTypeFieldMetadata col) {
-                    return !keyCols.contains(col.getDatabaseName());
+            uniqValFlds = F.view(F.asList(valFields), new IgnitePredicate<JdbcTypeField>() {
+                @Override public boolean apply(JdbcTypeField col) {
+                    return !keyCols.contains(col.getDatabaseFieldName());
                 }
             });
 
@@ -1548,7 +1745,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
 
             fullTblName = F.isEmpty(schema) ? tblName : schema + "." + tblName;
 
-            Collection<String> uniqValCols = databaseColumns(uniqValFields);
+            Collection<String> uniqValCols = databaseColumns(uniqValFlds);
 
             cols = F.concat(false, keyCols, uniqValCols);
 
@@ -1579,21 +1776,49 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         }
 
         /**
-         * Extract database column names from {@link CacheTypeFieldMetadata}.
+         * Extract database column names from {@link JdbcTypeField}.
          *
-         * @param dsc collection of {@link CacheTypeFieldMetadata}.
+         * @param dsc Collection of {@link JdbcTypeField}.
          * @return Collection with database column names.
          */
-        private static Collection<String> databaseColumns(Collection<CacheTypeFieldMetadata> dsc) {
-            return F.transform(dsc, new C1<CacheTypeFieldMetadata, String>() {
+        private static Collection<String> databaseColumns(Collection<JdbcTypeField> dsc) {
+            return F.transform(dsc, new C1<JdbcTypeField, String>() {
                 /** {@inheritDoc} */
-                @Override public String apply(CacheTypeFieldMetadata col) {
-                    return col.getDatabaseName();
+                @Override public String apply(JdbcTypeField col) {
+                    return col.getDatabaseFieldName();
                 }
             });
         }
 
         /**
+         * @return Key type.
+         */
+        protected String keyType() {
+            return typeMeta.getKeyType();
+        }
+
+        /**
+         * @return Key type kind.
+         */
+        protected TypeKind keyKind() {
+            return keyKind;
+        }
+
+        /**
+         * @return Value type.
+         */
+        protected String valueType() {
+            return typeMeta.getValueType();
+        }
+
+        /**
+         * @return Value type kind.
+         */
+        protected TypeKind valueKind() {
+            return valKind;
+        }
+
+        /**
          * Construct query for select values with key count less or equal {@code maxKeysPerStmt}
          *
          * @param keyCnt Key count.
@@ -1623,25 +1848,11 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         }
 
         /**
-         * @return Key type.
-         */
-        protected String keyType() {
-            return typeMeta.getKeyType();
-        }
-
-        /**
-         * @return Value type.
-         */
-        protected String valueType() {
-            return typeMeta.getValueType();
-        }
-
-        /**
          * Gets key columns.
          *
          * @return Key columns.
          */
-        protected Collection<CacheTypeFieldMetadata> keyColumns() {
+        protected JdbcTypeField[] keyColumns() {
             return typeMeta.getKeyFields();
         }
 
@@ -1650,7 +1861,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
          *
          * @return Value columns.
          */
-        protected Collection<CacheTypeFieldMetadata> valueColumns() {
+        protected JdbcTypeField[] valueColumns() {
             return typeMeta.getValueFields();
         }
 
@@ -1694,8 +1905,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         /** {@inheritDoc} */
         @Override public Void call() throws Exception {
             if (log.isDebugEnabled())
-                log.debug("Load cache using custom query [cache name= " + em.cacheName +
-                    ", key type=" + em.keyType() + ", query=" + qry + "]");
+                log.debug("Load cache using custom query [cache= " + U.maskName(em.cacheName) +
+                    ", keyType=" + em.keyType() + ", query=" + qry + "]");
 
             Connection conn = null;
 
@@ -1716,8 +1927,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                     colIdxs.put(meta.getColumnLabel(i), i);
 
                 while (rs.next()) {
-                    K1 key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), colIdxs, rs);
-                    V1 val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), colIdxs, rs);
+                    K1 key = buildObject(em.cacheName, em.keyType(), em.keyKind(), em.keyColumns(), em.keyCols, colIdxs, rs);
+                    V1 val = buildObject(em.cacheName, em.valueType(), em.valueKind(), em.valueColumns(), null, colIdxs, rs);
 
                     clo.apply(key, val);
                 }
@@ -1790,8 +2001,7 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
         /** {@inheritDoc} */
         @Override public Map<K1, V1> call() throws Exception {
             if (log.isDebugEnabled())
-                log.debug("Load values from db [table= " + em.fullTableName() +
-                    ", key count=" + keys.size() + "]");
+                log.debug("Load values from db [table= " + em.fullTableName() + ", keysCnt=" + keys.size() + "]");
 
             PreparedStatement stmt = null;
 
@@ -1801,8 +2011,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 int idx = 1;
 
                 for (Object key : keys)
-                    for (CacheTypeFieldMetadata field : em.keyColumns()) {
-                        Object fieldVal = extractParameter(em.cacheName, em.keyType(), field.getJavaName(), key);
+                    for (JdbcTypeField field : em.keyColumns()) {
+                        Object fieldVal = extractParameter(em.cacheName, em.keyType(), em.keyKind(), field.getJavaFieldName(), key);
 
                         fillParameter(stmt, idx++, field, fieldVal);
                     }
@@ -1812,8 +2022,8 @@ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>,
                 Map<K1, V1> entries = U.newHashMap(keys.size());
 
                 while (rs.next()) {
-                    K1 key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), em.loadColIdxs, rs);
-                    V1 val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs);
+                    K1 key = buildObject(em.cacheName, em.keyType(), em.keyKind(), em.keyColumns(), em.keyCols, em.loadColIdxs, rs);
+                    V1 val = buildObject(em.cacheName, em.valueType(), em.valueKind(), em.valueColumns(), null, em.loadColIdxs, rs);
 
                     entries.put(key, val);
                 }
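
For reference, the store refactored in this patch is configured through the new JdbcType API instead of the
deprecated CacheTypeMetadata. A minimal sketch of wiring up a single type mapping, using only the setters that
appear in this commit (the PERSON table, the org.example classes and the cache name below are illustrative
placeholders, and a data source or dialect would still need to be set on the factory in real use):

    import java.sql.Types;

    import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
    import org.apache.ignite.cache.store.jdbc.JdbcType;
    import org.apache.ignite.cache.store.jdbc.JdbcTypeField;

    public class JdbcTypeConfigSketch {
        /** Builds a store factory for a hypothetical PERSON table. */
        public static CacheJdbcPojoStoreFactory<Object, Object> storeFactory() {
            JdbcType personType = new JdbcType();

            // Which cache and database table this mapping belongs to.
            personType.setCacheName("personCache");
            personType.setDatabaseSchema("PUBLIC");
            personType.setDatabaseTable("PERSON");

            // Key and value classes (placeholder names).
            personType.setKeyType("org.example.PersonKey");
            personType.setValueType("org.example.Person");

            // Column-to-field mappings.
            personType.setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
            personType.setValueFields(new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"));

            CacheJdbcPojoStoreFactory<Object, Object> factory = new CacheJdbcPojoStoreFactory<>();

            // Register the mapping, as with setTypes(JdbcType...) shown in the hunk above.
            factory.setTypes(personType);

            return factory;
        }
    }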

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactory.java
index 74ab30b..6a46619 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactory.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcBlobStoreFactory.java
@@ -35,7 +35,7 @@ import org.apache.ignite.resources.SpringApplicationContextResource;
  *
  * <h2 class="header">Spring Example</h2>
  * <pre name="code" class="xml">
- *     &lt;bean id= "simpleDataSource" class="org.h2.jdbcx.JdbcDataSource"/&gt;
+ *     &lt;bean id= "myDataSource" class="org.h2.jdbcx.JdbcDataSource"/&gt;
  *
  *     &lt;bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration"&gt;
  *          ...
@@ -46,7 +46,7 @@ import org.apache.ignite.resources.SpringApplicationContextResource;
  *                      &lt;property name="cacheStoreFactory"&gt;
  *                          &lt;bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcBlobStoreFactory"&gt;
  *                              &lt;property name="user" value = "Ignite" /&gt;
- *                              &lt;property name="dataSourceBean" value = "simpleDataSource" /&gt;
+ *                              &lt;property name="dataSourceBean" value = "myDataSource" /&gt;
  *                          &lt;/bean&gt;
  *                      &lt;/property&gt;
  *                  &lt;/bean&gt;
@@ -99,7 +99,7 @@ public class CacheJdbcBlobStoreFactory<K, V> implements Factory<CacheJdbcBlobSto
 
     /** Application context. */
     @SpringApplicationContextResource
-    private Object appContext;
+    private Object appCtx;
 
     /** {@inheritDoc} */
     @Override public CacheJdbcBlobStore<K, V> create() {
@@ -118,7 +118,7 @@ public class CacheJdbcBlobStoreFactory<K, V> implements Factory<CacheJdbcBlobSto
         if (dataSrc != null)
             store.setDataSource(dataSrc);
         else if (dataSrcBean != null) {
-            if (appContext == null)
+            if (appCtx == null)
                 throw new IgniteException("Spring application context resource is not injected.");
 
             IgniteSpringHelper spring;
@@ -126,13 +126,13 @@ public class CacheJdbcBlobStoreFactory<K, V> implements Factory<CacheJdbcBlobSto
             try {
                 spring = IgniteComponentType.SPRING.create(false);
 
-                DataSource data = spring.loadBeanFromAppContext(appContext, dataSrcBean);
+                DataSource data = spring.loadBeanFromAppContext(appCtx, dataSrcBean);
 
                 store.setDataSource(data);
             }
             catch (IgniteCheckedException e) {
                 throw new IgniteException("Failed to load bean in application context [beanName=" + dataSrcBean +
-                    ", igniteConfig=" + appContext + ']');
+                    ", igniteConfig=" + appCtx + ']');
             }
         }
 
@@ -287,4 +287,4 @@ public class CacheJdbcBlobStoreFactory<K, V> implements Factory<CacheJdbcBlobSto
     @Override public String toString() {
         return S.toString(CacheJdbcBlobStoreFactory.class, this);
     }
-}
\ No newline at end of file
+}


[17/25] ignite git commit: Fixing tests.

Posted by ag...@apache.org.
Fixing tests.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/9922d83b
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/9922d83b
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/9922d83b

Branch: refs/heads/ignite-1282
Commit: 9922d83b8de24f70a89913bdf0c9739cc6dd9d35
Parents: a129439
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 11:06:26 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 11:06:26 2015 +0300

----------------------------------------------------------------------
 .../store/jdbc/CacheAbstractJdbcStore.java      |  2 +-
 .../configuration/CacheConfiguration.java       |  5 +-
 .../processors/cache/GridCacheMessage.java      |  2 +-
 .../processors/cache/GridCacheProcessor.java    |  9 ++-
 ...heJdbcPojoStoreBinaryMarshallerSelfTest.java | 85 ++++++++++++++++++++
 ...JdbcPojoStorePortableMarshallerSelfTest.java | 85 --------------------
 .../ignite/testsuites/IgniteCacheTestSuite.java |  4 +-
 7 files changed, 99 insertions(+), 93 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
index 6dc413b..7617e48 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheAbstractJdbcStore.java
@@ -68,7 +68,7 @@ import org.apache.ignite.internal.util.typedef.internal.U;
 import org.apache.ignite.lang.IgniteBiInClosure;
 import org.apache.ignite.lang.IgnitePredicate;
 import org.apache.ignite.lifecycle.LifecycleAware;
-import org.apache.ignite.marshaller.portable.BinaryMarshaller;
+import org.apache.ignite.internal.portable.BinaryMarshaller;
 import org.apache.ignite.resources.CacheStoreSessionResource;
 import org.apache.ignite.resources.IgniteInstanceResource;
 import org.apache.ignite.resources.LoggerResource;

http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
index 8c3ea19..26bfdbe 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
@@ -209,7 +209,7 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
 
     /** Default value for keep binary in store behavior .*/
     @SuppressWarnings({"UnnecessaryBoxing", "BooleanConstructorCall"})
-    public static final Boolean DFLT_KEEP_BINARY_IN_STORE = new Boolean(true);
+    public static final Boolean DFLT_KEEP_BINARY_IN_STORE = new Boolean(false);
 
     /** Default threshold for concurrent loading of keys from {@link CacheStore}. */
     public static final int DFLT_CONCURRENT_LOAD_ALL_THRESHOLD = 5;
@@ -888,8 +888,7 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
     /**
      * Flag indicating that {@link CacheStore} implementation
      * is working with binary objects instead of Java objects.
-     * Default value of this flag is {@link #DFLT_KEEP_BINARY_IN_STORE},
-     * because this is recommended behavior from performance standpoint.
+     * Default value of this flag is {@link #DFLT_KEEP_BINARY_IN_STORE}.
      * <p>
      * If set to {@code false}, Ignite will deserialize keys and
      * values stored in binary format before they are passed

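The hunk above flips the default so that, out of the box, a CacheStore now receives deserialized Java objects
rather than binary ones. A minimal sketch of opting back into binary objects for a store that can handle them,
assuming the conventional setKeepBinaryInStore(boolean) setter that pairs with the isKeepBinaryInStore() getter
used further below (only the getter and the default constant appear in this diff):

    import org.apache.ignite.configuration.CacheConfiguration;

    public class KeepBinarySketch {
        /** Cache configuration for a hypothetical cache whose store works with binary objects. */
        public static CacheConfiguration<Integer, Object> personCacheConfig() {
            CacheConfiguration<Integer, Object> ccfg = new CacheConfiguration<>("personCache");

            // With the new default (false) keys and values are deserialized before reaching the store.
            // Enable this only when the store is written against BinaryObject instances.
            ccfg.setKeepBinaryInStore(true); // assumed setter, not shown in this diff

            return ccfg;
        }
    }
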
http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMessage.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMessage.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMessage.java
index 61136bf..177454c 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMessage.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMessage.java
@@ -681,6 +681,6 @@ public abstract class GridCacheMessage implements Message {
 
     /** {@inheritDoc} */
     @Override public String toString() {
-        return S.toString(GridCacheMessage.class, this);
+        return S.toString(GridCacheMessage.class, this, "cacheId", cacheId);
     }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
index 178c5f0..6822ded 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
@@ -1021,7 +1021,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
         if (cfg.isKeepBinaryInStore() && cfg.isKeepBinaryInStore() != CacheConfiguration.DFLT_KEEP_BINARY_IN_STORE
             && !(ctx.config().getMarshaller() instanceof BinaryMarshaller))
             U.warn(log, "CacheConfiguration.isKeepBinaryInStore() configuration property will be ignored because " +
-                "PortableMarshaller is not used");
+                "BinaryMarshaller is not used");
 
         // Start managers.
         for (GridCacheManager mgr : F.view(cacheCtx.managers(), F.notContains(dhtExcludes(cacheCtx))))
@@ -1049,6 +1049,9 @@ public class GridCacheProcessor extends GridProcessorAdapter {
 
         cacheCtx.onStarted();
 
+        U.debug(log, "Started cache [name=" + U.maskName(cfg.getName()) + ", deploymentId=" +
+            cacheCtx.dynamicDeploymentId() + ']');
+
         if (log.isInfoEnabled())
             log.info("Started cache [name=" + U.maskName(cfg.getName()) + ", mode=" + cfg.getCacheMode() + ']');
     }
@@ -1601,6 +1604,10 @@ public class GridCacheProcessor extends GridProcessorAdapter {
         if (sharedCtx.cacheContext(CU.cacheId(cfg.getName())) != null)
             return;
 
+        U.debug(log, "prepare cache start [locNodeId=" + ctx.localNodeId() +
+            ", initiatingNodeId=" + initiatingNodeId + ", deploymentId=" + deploymentId + ", topVer=" + topVer +
+            ", name=" + cfg.getName() + ']');
+
         if (affNodeStart || clientNodeStart) {
             if (clientNodeStart && !affNodeStart) {
                 if (nearCfg != null)

http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java
new file mode 100644
index 0000000..659efb0
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreBinaryMarshallerSelfTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import org.apache.ignite.marshaller.Marshaller;
+import org.apache.ignite.internal.portable.BinaryMarshaller;
+
+/**
+ * Class for {@code PojoCacheStore} tests.
+ */
+public class CacheJdbcPojoStoreBinaryMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest {
+    /** {@inheritDoc} */
+    @Override protected Marshaller marshaller(){
+        return new BinaryMarshaller();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyClasses() throws Exception {
+        startTestGrid(false, true, false, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyClassesTx() throws Exception {
+        startTestGrid(false, true, false, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoValueClasses() throws Exception {
+        startTestGrid(false, false, true, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoValueClassesTx() throws Exception {
+        startTestGrid(false, false, true, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyAndValueClasses() throws Exception {
+        startTestGrid(false, true, true, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyAndValueClassesTx() throws Exception {
+        startTestGrid(false, true, true, true);
+
+        checkCacheContent();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
deleted file mode 100644
index 39504b1..0000000
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.cache.store.jdbc;
-
-import org.apache.ignite.marshaller.Marshaller;
-import org.apache.ignite.marshaller.portable.BinaryMarshaller;
-
-/**
- * Class for {@code PojoCacheStore} tests.
- */
-public class CacheJdbcPojoStorePortableMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest {
-    /** {@inheritDoc} */
-    @Override protected Marshaller marshaller(){
-        return new BinaryMarshaller();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoKeyClasses() throws Exception {
-        startTestGrid(false, true, false, false);
-
-        checkCacheContent();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoKeyClassesTx() throws Exception {
-        startTestGrid(false, true, false, true);
-
-        checkCacheContent();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoValueClasses() throws Exception {
-        startTestGrid(false, false, true, false);
-
-        checkCacheContent();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoValueClassesTx() throws Exception {
-        startTestGrid(false, false, true, true);
-
-        checkCacheContent();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoKeyAndValueClasses() throws Exception {
-        startTestGrid(false, true, true, false);
-
-        checkCacheContent();
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testLoadCacheNoKeyAndValueClassesTx() throws Exception {
-        startTestGrid(false, true, true, true);
-
-        checkCacheContent();
-    }
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/9922d83b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
index 8af9443..de1c9c4 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
@@ -32,7 +32,7 @@ import org.apache.ignite.cache.store.StoreResourceInjectionSelfTest;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreMultitreadedSelfTest;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreOptimizedMarshallerSelfTest;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreTest;
-import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStorePortableMarshallerSelfTest;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreBinaryMarshallerSelfTest;
 import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreMultithreadedSelfTest;
 import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreSelfTest;
 import org.apache.ignite.internal.processors.cache.CacheAffinityCallSelfTest;
@@ -214,7 +214,7 @@ public class IgniteCacheTestSuite extends TestSuite {
         suite.addTestSuite(GridCacheJdbcBlobStoreMultithreadedSelfTest.class);
         suite.addTestSuite(CacheJdbcPojoStoreTest.class);
         suite.addTestSuite(CacheJdbcPojoStoreOptimizedMarshallerSelfTest.class);
-        suite.addTestSuite(CacheJdbcPojoStorePortableMarshallerSelfTest.class);
+        suite.addTestSuite(CacheJdbcPojoStoreBinaryMarshallerSelfTest.class);
         suite.addTestSuite(CacheJdbcPojoStoreMultitreadedSelfTest.class);
         suite.addTestSuite(GridCacheBalancingStoreSelfTest.class);
         suite.addTestSuite(GridCacheAffinityApiSelfTest.class);


[11/25] ignite git commit: Merge remote-tracking branch 'origin/ignite-1.5' into ignite-1.5

Posted by ag...@apache.org.
Merge remote-tracking branch 'origin/ignite-1.5' into ignite-1.5


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/80503465
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/80503465
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/80503465

Branch: refs/heads/ignite-1282
Commit: 805034658e20a5666abade26be1989723d171986
Parents: c6b2fa5 2ae1709
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Mon Nov 23 16:31:45 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Mon Nov 23 16:31:45 2015 +0300

----------------------------------------------------------------------
 examples/schema-import/bin/db-init.sql          |   3 +-
 .../org/apache/ignite/schema/CacheConfig.java   |   7 +-
 .../java/org/apache/ignite/schema/Demo.java     |  20 +-
 .../org/apache/ignite/cache/QueryIndex.java     |  53 +-
 .../store/jdbc/CacheAbstractJdbcStore.java      | 638 ++++++++++++-------
 .../store/jdbc/CacheJdbcBlobStoreFactory.java   |  14 +-
 .../cache/store/jdbc/CacheJdbcPojoStore.java    | 444 +++++++++----
 .../store/jdbc/CacheJdbcPojoStoreFactory.java   | 277 +++++++-
 .../ignite/cache/store/jdbc/JdbcType.java       | 255 ++++++++
 .../cache/store/jdbc/JdbcTypeDefaultHasher.java |  43 ++
 .../ignite/cache/store/jdbc/JdbcTypeField.java  | 172 +++++
 .../ignite/cache/store/jdbc/JdbcTypeHasher.java |  34 +
 .../processors/cache/GridCacheMapEntry.java     |   3 +-
 .../processors/query/GridQueryProcessor.java    |   6 +-
 .../ignite/internal/visor/cache/VisorCache.java |   4 +-
 .../CacheJdbcPojoStoreAbstractSelfTest.java     | 395 ++++++++++++
 ...dbcPojoStoreOptimizedMarshallerSelfTest.java |  31 +
 ...JdbcPojoStorePortableMarshallerSelfTest.java |  85 +++
 .../store/jdbc/CacheJdbcPojoStoreTest.java      | 200 +++---
 ...eJdbcStoreAbstractMultithreadedSelfTest.java |   2 +-
 ...chePartitionedAtomicSetFailoverSelfTest.java |   5 +
 .../ignite/testsuites/IgniteCacheTestSuite.java |   6 +-
 modules/schema-import/README.txt                | 176 ++---
 .../ignite/schema/generator/CodeGenerator.java  | 198 +++---
 .../ignite/schema/generator/XmlGenerator.java   | 101 +--
 .../apache/ignite/schema/model/IndexItem.java   |  54 --
 .../ignite/schema/model/PojoDescriptor.java     |  72 +--
 .../ignite/schema/model/SchemaDescriptor.java   |   6 +-
 .../schema/parser/DatabaseMetadataParser.java   |  12 +-
 .../apache/ignite/schema/parser/DbTable.java    |  37 +-
 .../parser/dialect/DatabaseMetadataDialect.java |  32 +-
 .../parser/dialect/JdbcMetadataDialect.java     |  22 +-
 .../parser/dialect/OracleMetadataDialect.java   |  24 +-
 .../apache/ignite/schema/ui/ModalDialog.java    |   6 +-
 .../ignite/schema/ui/SchemaImportApp.java       |  13 +-
 .../schema/test/AbstractSchemaImportTest.java   |   4 +-
 .../schema/test/model/ignite-type-metadata.xml  | 610 +++++++++---------
 .../yardstick/config/ignite-store-config.xml    |  50 +-
 .../IgniteAtomicInvokeRetryBenchmark.java       |   2 +-
 .../failover/IgniteConsistencyException.java    |  64 ++
 ...IgniteTransactionalInvokeRetryBenchmark.java |   2 +-
 ...IgniteTransactionalWriteInvokeBenchmark.java | 135 +++-
 .../IgniteTransactionalWriteReadBenchmark.java  |   2 +-
 43 files changed, 3022 insertions(+), 1297 deletions(-)
----------------------------------------------------------------------



[08/25] ignite git commit: Merge remote-tracking branch 'apache/ignite-1.5' into ignite-1.5

Posted by ag...@apache.org.
Merge remote-tracking branch 'apache/ignite-1.5' into ignite-1.5


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/2ae17094
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/2ae17094
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/2ae17094

Branch: refs/heads/ignite-1282
Commit: 2ae17094f58b4bfafd006dc2fc14bf9c0a6f8abc
Parents: fa7a4bc 59fc24f
Author: ashutak <as...@gridgain.com>
Authored: Mon Nov 23 16:27:01 2015 +0300
Committer: ashutak <as...@gridgain.com>
Committed: Mon Nov 23 16:27:01 2015 +0300

----------------------------------------------------------------------
 .../ignite/internal/processors/cache/GridCacheMapEntry.java     | 3 ++-
 .../GridCachePartitionedAtomicSetFailoverSelfTest.java          | 5 +++++
 2 files changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------



[16/25] ignite git commit: IGNITE-1983: .NET: Fixed continuous query tests.

Posted by ag...@apache.org.
IGNITE-1983: .NET: Fixed continuous query tests.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/dcbfbd29
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/dcbfbd29
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/dcbfbd29

Branch: refs/heads/ignite-1282
Commit: dcbfbd290eebfaaf23ea1ec48edec2dc59227121
Parents: e4109f9
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Tue Nov 24 10:53:43 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Tue Nov 24 10:53:43 2015 +0300

----------------------------------------------------------------------
 .../Continuous/ContinuousQueryAbstractTest.cs   | 26 ++++++++++++--------
 1 file changed, 16 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/dcbfbd29/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
index cb9542f..720483a 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Query/Continuous/ContinuousQueryAbstractTest.cs
@@ -76,7 +76,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
 
         /** Cache name. */
         private readonly string cacheName;
-        
+
         /// <summary>
         /// Constructor.
         /// </summary>
@@ -308,7 +308,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 // Put from local node.
                 int key1 = PrimaryKey(cache1);
                 cache1.GetAndPut(key1, Entry(key1));
-                CheckFilterSingle(key1, null, Entry(key1));
+                CheckFilterSingle(key1, null, Entry(key1), !loc);
                 CheckCallbackSingle(key1, null, Entry(key1));
 
                 // Put from remote node.
@@ -322,7 +322,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 }
                 else
                 {
-                    CheckFilterSingle(key2, null, Entry(key2));
+                    CheckFilterSingle(key2, null, Entry(key2), true);
                     CheckCallbackSingle(key2, null, Entry(key2));
                 }
 
@@ -330,7 +330,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
 
                 // Ignored put from local node.
                 cache1.GetAndPut(key1, Entry(key1 + 1));
-                CheckFilterSingle(key1, Entry(key1), Entry(key1 + 1));
+                CheckFilterSingle(key1, Entry(key1), Entry(key1 + 1), !loc);
                 CheckNoCallback(100);
 
                 // Ignored put from remote node.
@@ -339,7 +339,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
                 if (loc)
                     CheckNoFilter(100);
                 else
-                    CheckFilterSingle(key2, Entry(key2), Entry(key2 + 1));
+                    CheckFilterSingle(key2, Entry(key2), Entry(key2 + 1), true);
 
                 CheckNoCallback(100);
             }
@@ -868,9 +868,15 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// <param name="expKey">Expected key.</param>
         /// <param name="expOldVal">Expected old value.</param>
         /// <param name="expVal">Expected value.</param>
-        private void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal)
+        /// <param name="hasBackup">Whether there is a backup node to check.</param>
+        private void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, 
+            bool hasBackup = false)
         {
             CheckFilterSingle(expKey, expOldVal, expVal, 1000);
+
+            // Filter is called on each cache node (primary and backup)
+            if (hasBackup)
+                CheckFilterSingle(expKey, expOldVal, expVal, 1000);
         }
 
         /// <summary>
@@ -880,7 +886,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// <param name="expOldVal">Expected old value.</param>
         /// <param name="expVal">Expected value.</param>
         /// <param name="timeout">Timeout.</param>
-        private void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, int timeout)
+        private static void CheckFilterSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, int timeout)
         {
             FilterEvent evt;
 
@@ -895,7 +901,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// Ensure that no filter events are logged.
         /// </summary>
         /// <param name="timeout">Timeout.</param>
-        private void CheckNoFilter(int timeout)
+        private static void CheckNoFilter(int timeout)
         {
             FilterEvent evt;
 
@@ -908,7 +914,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// <param name="expKey">Expected key.</param>
         /// <param name="expOldVal">Expected old value.</param>
         /// <param name="expVal">Expected new value.</param>
-        private void CheckCallbackSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal)
+        private static void CheckCallbackSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal)
         {
             CheckCallbackSingle(expKey, expOldVal, expVal, 1000);
         }
@@ -920,7 +926,7 @@ namespace Apache.Ignite.Core.Tests.Cache.Query.Continuous
         /// <param name="expOldVal">Expected old value.</param>
         /// <param name="expVal">Expected new value.</param>
         /// <param name="timeout">Timeout.</param>
-        private void CheckCallbackSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, int timeout)
+        private static void CheckCallbackSingle(int expKey, BinarizableEntry expOldVal, BinarizableEntry expVal, int timeout)
         {
             CallbackEvent evt;
 


[23/25] ignite git commit: Merge branch ignite-1.5 into ignite-1282

Posted by ag...@apache.org.
Merge branch ignite-1.5 into ignite-1282


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/6dc6ffe1
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/6dc6ffe1
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/6dc6ffe1

Branch: refs/heads/ignite-1282
Commit: 6dc6ffe12d6587a2b8b1dcc497828f16be45aa19
Parents: e52b267 eee3b21
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 12:39:07 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 12:39:07 2015 +0300

----------------------------------------------------------------------
 .../java/org/apache/ignite/IgniteBinary.java    |  2 +-
 .../internal/portable/BinaryTypeImpl.java       |  4 ++-
 .../processors/cache/CacheObjectContext.java    | 26 +++++++++++++++---
 .../datastructures/GridCacheAtomicLongImpl.java |  6 +++-
 .../portable/BinaryMarshallerSelfTest.java      |  6 ++--
 .../examples/project/vs/ignite-examples.vcxproj |  6 ++--
 .../Apache.Ignite.Benchmarks.csproj             |  3 ++
 .../Apache.Ignite.Core.Tests.TestDll.csproj     |  2 ++
 .../Apache.Ignite.Core.Tests.csproj             |  2 ++
 .../Continuous/ContinuousQueryAbstractTest.cs   | 29 ++++++++++++--------
 .../Apache.Ignite.Core.csproj                   |  2 ++
 .../Apache.Ignite.Core/Impl/Events/Events.cs    |  8 ++++--
 modules/platforms/dotnet/Apache.Ignite.sln      |  6 ++--
 .../dotnet/Apache.Ignite/Apache.Ignite.csproj   |  2 ++
 .../Apache.Ignite.Examples.csproj               |  2 ++
 .../Apache.Ignite.ExamplesDll.csproj            |  2 ++
 16 files changed, 77 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/6dc6ffe1/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/6dc6ffe1/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ignite/blob/6dc6ffe1/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java
----------------------------------------------------------------------


[13/25] ignite git commit: ignite-1.5 Reworked code generation in order to avoid compiler error: "java: code too large".

Posted by ag...@apache.org.
ignite-1.5 Reworked code generation in order to avoid compiler error: "java: code too large".


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/fe537533
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/fe537533
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/fe537533

Branch: refs/heads/ignite-1282
Commit: fe53753368b001ea2c3d568f77b7f2b03c937bb9
Parents: d71f612
Author: AKuznetsov <ak...@gridgain.com>
Authored: Mon Nov 23 22:06:37 2015 +0700
Committer: AKuznetsov <ak...@gridgain.com>
Committed: Mon Nov 23 22:06:37 2015 +0700

----------------------------------------------------------------------
 .../ignite/schema/generator/CodeGenerator.java  | 185 +++++++++----------
 1 file changed, 91 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
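
The generator now emits one small helper method per database table instead of a single huge cache(...) body, so
no generated method grows past the JVM's 64KB-per-method bytecode limit that causes "java: code too large". A
sketch of the shape of the generated class after this change (PERSON/Person are placeholder names; the real field
mappings, query fields and indexes are generated where the ellipsis comments appear):

    import java.util.ArrayList;
    import java.util.Collection;

    import org.apache.ignite.cache.QueryEntity;
    import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
    import org.apache.ignite.cache.store.jdbc.JdbcType;
    import org.apache.ignite.configuration.CacheConfiguration;

    public class CacheConfig {
        /** Create JDBC type for PERSON (one such method is generated per table). */
        private static JdbcType jdbcTypePerson(String cacheName) {
            JdbcType jdbcType = new JdbcType();

            jdbcType.setCacheName(cacheName);
            // ... generated key/value field mappings go here ...

            return jdbcType;
        }

        /** Create SQL query descriptor for PERSON (one such method is generated per table). */
        private static QueryEntity queryEntityPerson() {
            QueryEntity qryEntity = new QueryEntity();

            // ... generated query fields and indexes go here ...

            return qryEntity;
        }

        /** Assemble the configuration from the small per-type methods. */
        public static <K, V> CacheConfiguration<K, V> cache(String cacheName,
            CacheJdbcPojoStoreFactory<K, V> storeFactory) {
            CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(cacheName);

            ccfg.setCacheStoreFactory(storeFactory);
            ccfg.setReadThrough(true);
            ccfg.setWriteThrough(true);

            storeFactory.setTypes(jdbcTypePerson(cacheName));

            Collection<QueryEntity> qryEntities = new ArrayList<>();

            qryEntities.add(queryEntityPerson());

            ccfg.setQueryEntities(qryEntities);

            return ccfg;
        }
    }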


http://git-wip-us.apache.org/repos/asf/ignite/blob/fe537533/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
index 283ccc1..92167e5 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
@@ -71,11 +71,11 @@ public class CodeGenerator {
         "void",         "volatile",      "while"
     ));
 
-    /** java.lang.*  */
-    private  static final String JAVA_LANG_PKG = "java.lang.";
+    /** java.lang.* */
+    private static final String JAVA_LANG_PKG = "java.lang.";
 
-    /** java.util.*  */
-    private  static final String JAVA_UTIL_PKG = "java.util.";
+    /** java.util.* */
+    private static final String JAVA_UTIL_PKG = "java.util.";
 
     /** Regexp to validate java identifier. */
     private static final Pattern VALID_JAVA_IDENTIFIER =
@@ -547,43 +547,15 @@ public class CodeGenerator {
 
             if (javaTypeName.startsWith(JAVA_LANG_PKG))
                 javaTypeName = javaTypeName.substring(JAVA_LANG_PKG.length());
-            else  if (javaTypeName.startsWith(JAVA_UTIL_PKG))
+            else if (javaTypeName.startsWith(JAVA_UTIL_PKG))
                 javaTypeName = javaTypeName.substring(JAVA_UTIL_PKG.length());
 
             add2(src, owner + ".add(new JdbcTypeField(Types." + field.dbTypeName() + ", \"" + field.dbName() + "\", " +
-                    javaTypeName + ".class, \"" + field.javaName() + "\"));");
+                javaTypeName + ".class, \"" + field.javaName() + "\"));");
         }
     }
 
     /**
-     * Add query fields.
-     *
-     * @param src Source code lines.
-     * @param fields List of fields to add.
-     * @param comment Commentary text.
-     * @param first {@code true} if variable should be declared.
-     * @return {@code false} if variable was declared.
-     */
-    private static boolean addQueryFields(Collection<String> src, Collection<PojoField> fields, String comment,
-        boolean first) {
-        if (fields.isEmpty())
-            return first;
-
-        add2(src, comment);
-        add2(src, (first ? "LinkedHashMap<String, String> " : "") + "fields = new LinkedHashMap<>();");
-        add0(src, "");
-
-        for (PojoField field : fields)
-            add2(src, "fields.put(\"" + field.javaName() + "\", \"" + javaTypeName(field) + "\");");
-
-        add0(src, "");
-        add2(src, "qryEntity.setFields(fields);");
-        add0(src, "");
-
-        return false;
-    }
-
-    /**
      * Generate java snippet for cache configuration with JDBC store and types metadata.
      *
      * @param pojos POJO descriptors.
@@ -612,39 +584,23 @@ public class CodeGenerator {
         Collection<String> src = new ArrayList<>(256);
 
         header(src, pkg, "java.sql.*;java.util.*;" +
-            "org.apache.ignite.cache.*;org.apache.ignite.cache.store.jdbc.*;" +
-            "org.apache.ignite.configuration.*;" + pkg + ".*",
+                "org.apache.ignite.cache.*;org.apache.ignite.cache.store.jdbc.*;" +
+                "org.apache.ignite.configuration.*",
             "CacheConfig", "CacheConfig");
 
-        add1(src, "/**");
-        add1(src, "* Configure cache.");
-        add1(src, "*");
-        add1(src, "* @param cacheName Cache name.");
-        add1(src, "* @param storeFactory Cache store factory.");
-        add1(src, "*/");
-        add1(src, "public static <K, V> CacheConfiguration<K, V> cache(String cacheName," +
-            " CacheJdbcPojoStoreFactory<K, V> storeFactory) {");
-        add2(src, "if (storeFactory == null)");
-        add3(src, " throw new IllegalArgumentException(\"Cache store factory cannot be null.\");");
-        add0(src, "");
-        add2(src, "CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(cacheName);");
-        add0(src, "");
-        add2(src, "ccfg.setCacheStoreFactory(storeFactory);");
-        add2(src, "ccfg.setReadThrough(true);");
-        add2(src, "ccfg.setWriteThrough(true);");
-        add0(src, "");
-
-        add2(src, "// Configure JDBC types. ");
-        add2(src, "Collection<JdbcType> jdbcTypes = new ArrayList<>();");
-        add0(src, "");
-
-        boolean first = true;
-
+        // Generate methods for each type in order to avoid compiler error "java: code too large".
         for (PojoDescriptor pojo : pojos) {
             String tbl = pojo.table();
+            String valClsName = pojo.valueClassName();
 
-            add2(src, "// " + tbl + ".");
-            add2(src, (first ? "JdbcType " : "") + "jdbcType = new JdbcType();");
+            add1(src, "/**");
+            add1(src, "* Create JDBC type for " + tbl + ".");
+            add1(src, "*");
+            add1(src, "* @param cacheName Cache name.");
+            add1(src, "*/");
+            add1(src, "private static JdbcType jdbcType" + valClsName + "(String cacheName) {");
+
+            add2(src, "JdbcType jdbcType = new JdbcType();");
             add0(src, "");
 
             add2(src, "jdbcType.setCacheName(cacheName);");
@@ -654,70 +610,68 @@ public class CodeGenerator {
             add2(src, "jdbcType.setDatabaseTable(\"" + tbl + "\");");
 
             // Java info.
-            add2(src, "jdbcType.setKeyType(" + pojo.keyClassName() + ".class.getName());");
-            add2(src, "jdbcType.setValueType(" + pojo.valueClassName() + ".class.getName());");
+            add2(src, "jdbcType.setKeyType(\"" + pkg + "." + pojo.keyClassName() + "\");");
+            add2(src, "jdbcType.setValueType(\"" + pkg + "." + valClsName + "\");");
             add0(src, "");
 
             // Key fields.
             add2(src, "// Key fields for " + tbl + ".");
-            add2(src, (first ? "Collection<JdbcTypeField> " : "") + "keys = new ArrayList<>();");
+            add2(src, "Collection<JdbcTypeField> keys = new ArrayList<>();");
             addFields(src, "keys", pojo.keyFields());
             add2(src, "jdbcType.setKeyFields(keys.toArray(new JdbcTypeField[keys.size()]));");
             add0(src, "");
 
             // Value fields.
             add2(src, "// Value fields for " + tbl + ".");
-            add2(src, (first ? "Collection<JdbcTypeField> " : "") + "vals = new ArrayList<>();");
+            add2(src, "Collection<JdbcTypeField> vals = new ArrayList<>();");
             addFields(src, "vals", pojo.valueFields(includeKeys));
             add2(src, "jdbcType.setValueFields(vals.toArray(new JdbcTypeField[vals.size()]));");
             add0(src, "");
-
-            add2(src, "jdbcTypes.add(jdbcType);");
+            add2(src, "return jdbcType;");
+            add1(src, "}");
             add0(src, "");
 
-            first = false;
-        }
+            add1(src, "/**");
+            add1(src, "* Create SQL Query descriptor for " + tbl + ".");
+            add1(src, "*/");
+            add1(src, "private static QueryEntity queryEntity" + valClsName + "() {");
 
-        add2(src, "storeFactory.setTypes(jdbcTypes.toArray(new JdbcType[jdbcTypes.size()]));");
-        add0(src, "");
+            // Query entity.
+            add2(src, "QueryEntity qryEntity = new QueryEntity();");
+            add0(src, "");
+            add2(src, "qryEntity.setKeyType(\"" + pkg + "." + pojo.keyClassName() + "\");");
+            add2(src, "qryEntity.setValueType(\"" + pkg + "." + valClsName + "\");");
 
-        // Queries entities.
-        add2(src, "// Configure query entities. ");
-        add2(src, "Collection<QueryEntity> qryEntities = new ArrayList<>();");
-        add0(src, "");
+            add0(src, "");
 
-        first = true;
-        boolean firstIdxs = true;
-        boolean firstIdx = true;
+            // Query fields.
+            add2(src, "// Query fields for " + tbl + ".");
+            add2(src, "LinkedHashMap<String, String> fields = new LinkedHashMap<>();");
+            add0(src, "");
 
-        for (PojoDescriptor pojo : pojos) {
-            String tbl = pojo.table();
+            for (PojoField field : pojo.fields())
+                add2(src, "fields.put(\"" + field.javaName() + "\", \"" + javaTypeName(field) + "\");");
 
-            add2(src, (first ? "QueryEntity " : "") + "qryEntity = new QueryEntity();");
             add0(src, "");
-            add2(src, "qryEntity.setKeyType(" + pojo.keyClassName() + ".class.getName());");
-            add2(src, "qryEntity.setValueType(" + pojo.valueClassName() + ".class.getName());");
+            add2(src, "qryEntity.setFields(fields);");
             add0(src, "");
 
-            // Query fields.
-            addQueryFields(src, pojo.fields(), "// Query fields for " + tbl + ".", first);
-
             // Indexes.
             Collection<QueryIndex> idxs = pojo.indexes();
 
             if (!idxs.isEmpty()) {
                 add2(src, "// Indexes for " + tbl + ".");
-                add2(src, (firstIdxs ? "Collection<QueryIndex> " : "") + "idxs = new ArrayList<>();");
+                add2(src, "Collection<QueryIndex> idxs = new ArrayList<>();");
                 add0(src, "");
 
-                firstIdxs = false;
+                boolean firstIdx = true;
 
                 for (QueryIndex idx : idxs) {
                     if (idx.getFields().size() == 1) {
                         Map.Entry<String, Boolean> fld = F.first(idx.getFields().entrySet());
 
                         add2(src, "idxs.add(new QueryIndex(\"" + fld.getKey() + "\", " + fld.getValue() + ", \"" +
-                            idx.getName()  + "\"));");
+                            idx.getName() + "\"));");
                         add0(src, "");
                     }
                     else {
@@ -732,7 +686,7 @@ public class CodeGenerator {
                         add0(src, "");
 
                         for (Map.Entry<String, Boolean> idxFld : idx.getFields().entrySet())
-                            add2(src, "idxFlds.put(\"" + idxFld.getKey()  + "\", " + idxFld.getValue() + ");");
+                            add2(src, "idxFlds.put(\"" + idxFld.getKey() + "\", " + idxFld.getValue() + ");");
 
                         add0(src, "");
 
@@ -750,12 +704,55 @@ public class CodeGenerator {
                 add0(src, "");
             }
 
-            add2(src, "ccfg.setQueryEntities(qryEntities);");
-            add0(src, "");
+            add2(src, "return qryEntity;");
 
-            first = false;
+            add1(src, "}");
+            add0(src, "");
         }
 
+        add1(src, "/**");
+        add1(src, "* Configure cache.");
+        add1(src, "*");
+        add1(src, "* @param cacheName Cache name.");
+        add1(src, "* @param storeFactory Cache store factory.");
+        add1(src, "*/");
+        add1(src, "public static <K, V> CacheConfiguration<K, V> cache(String cacheName," +
+            " CacheJdbcPojoStoreFactory<K, V> storeFactory) {");
+        add2(src, "if (storeFactory == null)");
+        add3(src, " throw new IllegalArgumentException(\"Cache store factory cannot be null.\");");
+        add0(src, "");
+        add2(src, "CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(cacheName);");
+        add0(src, "");
+        add2(src, "ccfg.setCacheStoreFactory(storeFactory);");
+        add2(src, "ccfg.setReadThrough(true);");
+        add2(src, "ccfg.setWriteThrough(true);");
+        add0(src, "");
+
+        add2(src, "// Configure JDBC types. ");
+        add2(src, "Collection<JdbcType> jdbcTypes = new ArrayList<>();");
+        add0(src, "");
+
+        for (PojoDescriptor pojo : pojos)
+            add2(src, "jdbcTypes.add(jdbcType" + pojo.valueClassName() + "(cacheName));");
+
+        add0(src, "");
+
+        add2(src, "storeFactory.setTypes(jdbcTypes.toArray(new JdbcType[jdbcTypes.size()]));");
+        add0(src, "");
+
+
+        add2(src, "// Configure query entities. ");
+        add2(src, "Collection<QueryEntity> qryEntities = new ArrayList<>();");
+        add0(src, "");
+
+        for (PojoDescriptor pojo : pojos)
+            add2(src, "qryEntities.add(queryEntity" + pojo.valueClassName() + "());");
+
+        add0(src, "");
+
+        add2(src, "ccfg.setQueryEntities(qryEntities);");
+        add0(src, "");
+
         add2(src, "return ccfg;");
         add1(src, "}");
 


[06/25] ignite git commit: Muted GridCachePartitionedAtomicSetFailoverSelfTest.testNodeRestart

Posted by ag...@apache.org.
Muted GridCachePartitionedAtomicSetFailoverSelfTest.testNodeRestart


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/59fc24f4
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/59fc24f4
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/59fc24f4

Branch: refs/heads/ignite-1282
Commit: 59fc24f40b61efd00542a89b47ca3155b6793943
Parents: d71f612
Author: nikolay tikhonov <nt...@gridgain.com>
Authored: Mon Nov 23 16:04:54 2015 +0300
Committer: Tikhonov Nikolay <ti...@gmail.com>
Committed: Mon Nov 23 16:04:54 2015 +0300

----------------------------------------------------------------------
 .../ignite/internal/processors/cache/GridCacheMapEntry.java     | 3 ++-
 .../GridCachePartitionedAtomicSetFailoverSelfTest.java          | 5 +++++
 2 files changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/59fc24f4/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java
index 2b40351..ac42121 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java
@@ -3296,7 +3296,8 @@ public abstract class GridCacheMapEntry extends GridMetadataAwareAdapter impleme
         if (!cctx.isLocal() && !isNear()) {
             GridDhtLocalPartition locPart = cctx.topology().localPartition(partition(), topVer, false);
 
-            assert locPart != null;
+            if (locPart == null)
+                return 0;
 
             updateCntr = locPart.nextUpdateCounter();
         }

http://git-wip-us.apache.org/repos/asf/ignite/blob/59fc24f4/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/partitioned/GridCachePartitionedAtomicSetFailoverSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/partitioned/GridCachePartitionedAtomicSetFailoverSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/partitioned/GridCachePartitionedAtomicSetFailoverSelfTest.java
index d542af9..8ff95c7 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/partitioned/GridCachePartitionedAtomicSetFailoverSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/partitioned/GridCachePartitionedAtomicSetFailoverSelfTest.java
@@ -37,4 +37,9 @@ public class GridCachePartitionedAtomicSetFailoverSelfTest extends GridCacheSetF
     @Override protected CacheAtomicityMode collectionCacheAtomicityMode() {
         return ATOMIC;
     }
+
+    /** {@inheritDoc} */
+    @Override public void testNodeRestart() throws Exception {
+        fail("https://issues.apache.org/jira/browse/IGNITE-170");
+    }
 }
\ No newline at end of file


[04/25] ignite git commit: IGNITE-1753 Refactored usages of deprecated CacheTypeMetadata to JdbcType.

Posted by ag...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java
index d78ea48..aa013b9 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStore.java
@@ -21,226 +21,392 @@ import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import javax.cache.CacheException;
 import javax.cache.integration.CacheLoaderException;
-import org.apache.ignite.cache.CacheTypeFieldMetadata;
-import org.apache.ignite.cache.CacheTypeMetadata;
+import org.apache.ignite.binary.BinaryObject;
+import org.apache.ignite.binary.BinaryObjectBuilder;
 import org.apache.ignite.cache.store.CacheStore;
 import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.internal.util.typedef.F;
 import org.apache.ignite.internal.util.typedef.internal.U;
 import org.jetbrains.annotations.Nullable;
 
 /**
  * Implementation of {@link CacheStore} backed by JDBC and POJO via reflection.
  *
- * This implementation stores objects in underlying database using java beans mapping description via reflection.
- * <p>
+ * This implementation stores objects in underlying database using java beans mapping description via reflection. <p>
  * Use {@link CacheJdbcPojoStoreFactory} factory to pass {@link CacheJdbcPojoStore} to {@link CacheConfiguration}.
  */
 public class CacheJdbcPojoStore<K, V> extends CacheAbstractJdbcStore<K, V> {
+    /** POJO methods cache. */
+    private volatile Map<String, Map<String, PojoMethodsCache>> pojosMthds = Collections.emptyMap();
+
     /**
-     * POJO methods cache.
+     * Get field value from object for use as query parameter.
+     *
+     * @param cacheName Cache name.
+     * @param typeName Type name.
+     * @param fldName Field name.
+     * @param obj Cache object.
+     * @return Field value from object.
+     * @throws CacheException in case of error.
      */
-    protected static class PojoMethodsCache {
-        /** POJO class. */
-        protected final Class<?> cls;
-
-        /** Constructor for POJO object. */
-        private Constructor ctor;
-
-        /** {@code true} if object is a simple type. */
-        private final boolean simple;
-
-        /** Cached setters for POJO object. */
-        private Map<String, Method> getters;
-
-        /** Cached getters for POJO object. */
-        private Map<String, Method> setters;
-
-        /**
-         * POJO methods cache.
-         *
-         * @param clsName Class name.
-         * @param fields Fields.
-         *
-         * @throws CacheException If failed to construct type cache.
-         */
-        public PojoMethodsCache(String clsName, Collection<CacheTypeFieldMetadata> fields) throws CacheException {
-            try {
-                cls = Class.forName(clsName);
-
-                if (simple = simpleType(cls))
-                    return;
+    @Override @Nullable protected Object extractParameter(@Nullable String cacheName, String typeName, TypeKind typeKind,
+        String fldName, Object obj) throws CacheException {
+        switch (typeKind) {
+            case BUILT_IN:
+                return obj;
+            case POJO:
+                return extractPojoParameter(cacheName, typeName, fldName, obj);
+            default:
+                return extractBinaryParameter(fldName, obj);
+        }
+    }
 
-                ctor = cls.getDeclaredConstructor();
+    /**
+     * Get field value from POJO for use as query parameter.
+     *
+     * @param cacheName Cache name.
+     * @param typeName Type name.
+     * @param fldName Field name.
+     * @param obj Cache object.
+     * @return Field value from object.
+     * @throws CacheException in case of error.
+     */
+    @Nullable private Object extractPojoParameter(@Nullable String cacheName, String typeName, String fldName,
+        Object obj) throws CacheException {
+        try {
+            Map<String, PojoMethodsCache> cacheMethods = pojosMthds.get(cacheName);
 
-                if (!ctor.isAccessible())
-                    ctor.setAccessible(true);
-            }
-            catch (ClassNotFoundException e) {
-                throw new CacheException("Failed to find class: " + clsName, e);
-            }
-            catch (NoSuchMethodException e) {
-                throw new CacheException("Failed to find default constructor for class: " + clsName, e);
-            }
+            if (cacheMethods == null)
+                throw new CacheException("Failed to find POJO type metadata for cache: " + U.maskName(cacheName));
 
-            setters = U.newHashMap(fields.size());
+            PojoMethodsCache mc = cacheMethods.get(typeName);
 
-            getters = U.newHashMap(fields.size());
+            if (mc == null)
+                throw new CacheException("Failed to find POJO type metadata for type: " + typeName);
 
-            for (CacheTypeFieldMetadata field : fields) {
-                String prop = capitalFirst(field.getJavaName());
+            Method getter = mc.getters.get(fldName);
 
-                try {
-                    getters.put(field.getJavaName(), cls.getMethod("get" + prop));
-                }
-                catch (NoSuchMethodException ignored) {
-                    try {
-                        getters.put(field.getJavaName(), cls.getMethod("is" + prop));
-                    }
-                    catch (NoSuchMethodException e) {
-                        throw new CacheException("Failed to find getter in POJO class [clsName=" + clsName +
-                            ", prop=" + field.getJavaName() + "]", e);
-                    }
-                }
+            if (getter == null)
+                throw new CacheLoaderException("Failed to find getter in POJO class [class=" + typeName +
+                    ", prop=" + fldName + "]");
 
-                try {
-                    setters.put(field.getJavaName(), cls.getMethod("set" + prop, field.getJavaType()));
-                }
-                catch (NoSuchMethodException e) {
-                    throw new CacheException("Failed to find setter in POJO class [clsName=" + clsName +
-                        ", prop=" + field.getJavaName() + "]", e);
-                }
-            }
+            return getter.invoke(obj);
         }
-
-        /**
-         * Capitalizes the first character of the given string.
-         *
-         * @param str String.
-         * @return String with capitalized first character.
-         */
-        @Nullable private String capitalFirst(@Nullable String str) {
-            return str == null ? null :
-                str.isEmpty() ? "" : Character.toUpperCase(str.charAt(0)) + str.substring(1);
+        catch (Exception e) {
+            throw new CacheException("Failed to read object of class: " + typeName, e);
         }
     }
 
-    /** Methods cache. */
-    protected volatile Map<String, Map<String, PojoMethodsCache>> mtdsCache = Collections.emptyMap();
+    /**
+     * Get field value from Binary object for use as query parameter.
+     *
+     * @param fieldName Field name to extract query parameter for.
+     * @param obj Object to process.
+     * @return Field value from object.
+     * @throws CacheException in case of error.
+     */
+    private Object extractBinaryParameter(String fieldName, Object obj) throws CacheException {
+        if (obj instanceof BinaryObject)
+            return ((BinaryObject)obj).field(fieldName);
+
+        throw new CacheException("Failed to read property value from non binary object [class=" +
+            obj.getClass() + ", property=" + fieldName + "]");
+    }
 
     /** {@inheritDoc} */
-    @Override protected void prepareBuilders(@Nullable String cacheName, Collection<CacheTypeMetadata> types)
-        throws CacheException {
-        Map<String, PojoMethodsCache> typeMethods = U.newHashMap(types.size() * 2);
+    @Override protected <R> R buildObject(@Nullable String cacheName, String typeName, TypeKind typeKind,
+        JdbcTypeField[] flds, Collection<String> hashFlds, Map<String, Integer> loadColIdxs, ResultSet rs)
+        throws CacheLoaderException {
+        switch (typeKind) {
+            case BUILT_IN:
+                return (R)buildBuiltinObject(typeName, flds, loadColIdxs, rs);
+            case POJO:
+                return (R)buildPojoObject(cacheName, typeName, flds, loadColIdxs, rs);
+            default:
+                return (R)buildBinaryObject(typeName, flds, hashFlds, loadColIdxs, rs);
+        }
+    }
 
-        for (CacheTypeMetadata type : types) {
-            String keyType = type.getKeyType();
-            typeMethods.put(keyType, new PojoMethodsCache(keyType, type.getKeyFields()));
+    /**
+     * Construct Java built in object from query result.
+     *
+     * @param typeName Type name.
+     * @param fields Fields descriptors.
+     * @param loadColIdxs Select query columns indexes.
+     * @param rs ResultSet to take data from.
+     * @return Constructed object.
+     * @throws CacheLoaderException If failed to construct POJO.
+     */
+    private Object buildBuiltinObject(String typeName, JdbcTypeField[] fields, Map<String, Integer> loadColIdxs,
+        ResultSet rs) throws CacheLoaderException {
+        try {
+            JdbcTypeField field = fields[0];
 
-            String valType = type.getValueType();
-            typeMethods.put(valType, new PojoMethodsCache(valType, type.getValueFields()));
+            return getColumnValue(rs, loadColIdxs.get(field.getDatabaseFieldName()), field.getJavaFieldType());
         }
+        catch (SQLException e) {
+            throw new CacheLoaderException("Failed to read object of class: " + typeName, e);
+        }
+    }
 
-        Map<String, Map<String, PojoMethodsCache>> newMtdsCache = new HashMap<>(mtdsCache);
+    /**
+     * Construct POJO from query result.
+     *
+     * @param cacheName Cache name.
+     * @param typeName Type name.
+     * @param flds Fields descriptors.
+     * @param loadColIdxs Select query columns index.
+     * @param rs ResultSet.
+     * @return Constructed POJO.
+     * @throws CacheLoaderException If failed to construct POJO.
+     */
+    private Object buildPojoObject(@Nullable String cacheName, String typeName,
+        JdbcTypeField[] flds, Map<String, Integer> loadColIdxs, ResultSet rs)
+        throws CacheLoaderException {
 
-        newMtdsCache.put(cacheName, typeMethods);
+        Map<String, PojoMethodsCache> cacheMethods = pojosMthds.get(cacheName);
 
-        mtdsCache = newMtdsCache;
-    }
+        if (cacheMethods == null)
+            throw new CacheLoaderException("Failed to find POJO types metadata for cache: " + U.maskName(cacheName));
 
-    /** {@inheritDoc} */
-    @Override protected <R> R buildObject(String cacheName, String typeName, Collection<CacheTypeFieldMetadata> fields,
-        Map<String, Integer> loadColIdxs, ResultSet rs) throws CacheLoaderException {
-        PojoMethodsCache mc = mtdsCache.get(cacheName).get(typeName);
+        PojoMethodsCache mc = cacheMethods.get(typeName);
 
         if (mc == null)
-            throw new CacheLoaderException("Failed to find cache type metadata for type: " + typeName);
+            throw new CacheLoaderException("Failed to find POJO type metadata for type: " + typeName);
 
         try {
-            if (mc.simple) {
-                CacheTypeFieldMetadata field = F.first(fields);
-
-                return (R)getColumnValue(rs, loadColIdxs.get(field.getDatabaseName()), mc.cls);
-            }
-
             Object obj = mc.ctor.newInstance();
 
-            for (CacheTypeFieldMetadata field : fields) {
-                String fldJavaName = field.getJavaName();
+            for (JdbcTypeField fld : flds) {
+                String fldJavaName = fld.getJavaFieldName();
 
                 Method setter = mc.setters.get(fldJavaName);
 
                 if (setter == null)
-                    throw new IllegalStateException("Failed to find setter in POJO class [clsName=" + typeName +
+                    throw new IllegalStateException("Failed to find setter in POJO class [type=" + typeName +
                         ", prop=" + fldJavaName + "]");
 
-                String fldDbName = field.getDatabaseName();
+                String fldDbName = fld.getDatabaseFieldName();
 
                 Integer colIdx = loadColIdxs.get(fldDbName);
 
                 try {
-                    setter.invoke(obj, getColumnValue(rs, colIdx, field.getJavaType()));
+                    Object colVal = getColumnValue(rs, colIdx, fld.getJavaFieldType());
+
+                    try {
+                        setter.invoke(obj, colVal);
+                    }
+                    catch (Exception e) {
+                        throw new CacheLoaderException("Failed to set property in POJO class [type=" + typeName +
+                            ", prop=" + fldJavaName + ", col=" + colIdx + ", dbName=" + fldDbName + "]", e);
+                    }
                 }
-                catch (Exception e) {
-                    throw new IllegalStateException("Failed to set property in POJO class [clsName=" + typeName +
+                catch (SQLException e) {
+                    throw new CacheLoaderException("Failed to read object property [type= " + typeName +
                         ", prop=" + fldJavaName + ", col=" + colIdx + ", dbName=" + fldDbName + "]", e);
                 }
             }
 
-            return (R)obj;
-        }
-        catch (SQLException e) {
-            throw new CacheLoaderException("Failed to read object of class: " + typeName, e);
+            return obj;
         }
         catch (Exception e) {
             throw new CacheLoaderException("Failed to construct instance of class: " + typeName, e);
         }
     }
 
-    /** {@inheritDoc} */
-    @Nullable @Override protected Object extractParameter(String cacheName, String typeName, String fieldName,
-        Object obj)
-        throws CacheException {
+    /**
+     * Construct binary object from query result.
+     *
+     * @param typeName Type name.
+     * @param fields Fields descriptors.
+     * @param hashFields Collection of fields to build hash for.
+     * @param loadColIdxs Select query columns index.
+     * @param rs ResultSet.
+     * @return Constructed binary object.
+     * @throws CacheLoaderException If failed to construct binary object.
+     */
+    protected Object buildBinaryObject(String typeName, JdbcTypeField[] fields,
+        Collection<String> hashFields, Map<String, Integer> loadColIdxs, ResultSet rs) throws CacheLoaderException {
         try {
-            PojoMethodsCache mc = mtdsCache.get(cacheName).get(typeName);
+            BinaryObjectBuilder builder = ignite.binary().builder(typeName);
 
-            if (mc == null)
-                throw new CacheException("Failed to find cache type metadata for type: " + typeName);
+            boolean calcHash = hashFields != null;
 
-            if (mc.simple)
-                return obj;
+            Collection<Object> hashValues = calcHash ? new ArrayList<>(hashFields.size()) : null;
 
-            Method getter = mc.getters.get(fieldName);
+            for (JdbcTypeField field : fields) {
+                Integer colIdx = loadColIdxs.get(field.getDatabaseFieldName());
 
-            if (getter == null)
-                throw new CacheLoaderException("Failed to find getter in POJO class [clsName=" + typeName +
-                    ", prop=" + fieldName + "]");
+                Object colVal = getColumnValue(rs, colIdx, field.getJavaFieldType());
 
-            return getter.invoke(obj);
+                builder.setField(field.getJavaFieldName(), colVal);
+
+                if (calcHash)
+                    hashValues.add(colVal);
+            }
+
+            if (calcHash)
+                builder.hashCode(hasher.hashCode(hashValues));
+
+            return builder.build();
         }
-        catch (Exception e) {
-            throw new CacheException("Failed to read object of class: " + typeName, e);
+        catch (SQLException e) {
+            throw new CacheException("Failed to read binary object", e);
         }
     }
 
-    /** {@inheritDoc} */
-    @Override protected Object keyTypeId(Object key) throws CacheException {
-        return key.getClass();
+    /**
+     * Calculate type ID for object.
+     *
+     * @param obj Object to calculate type ID for.
+     * @return Type ID.
+     * @throws CacheException If failed to calculate type ID for given object.
+     */
+    @Override protected Object typeIdForObject(Object obj) throws CacheException {
+        if (obj instanceof BinaryObject)
+            return ((BinaryObject)obj).typeId();
+
+        return obj.getClass();
     }
 
     /** {@inheritDoc} */
-    @Override protected Object keyTypeId(String type) throws CacheException {
+    @Override protected Object typeIdForTypeName(TypeKind kind, String typeName) throws CacheException {
+        if (kind == TypeKind.BINARY)
+            return ignite.binary().typeId(typeName);
+
         try {
-            return Class.forName(type);
+            return Class.forName(typeName);
         }
         catch (ClassNotFoundException e) {
-            throw new CacheException("Failed to find class: " + type, e);
+            throw new CacheException("Failed to find class: " + typeName, e);
+        }
+    }
+
+    /**
+     * Prepare internal store specific builders for provided types metadata.
+     *
+     * @param cacheName Cache name to prepare builders for.
+     * @param types Collection of types.
+     * @throws CacheException If failed to prepare internal builders for types.
+     */
+    @Override protected void prepareBuilders(@Nullable String cacheName, Collection<JdbcType> types)
+        throws CacheException {
+        Map<String, PojoMethodsCache> pojoMethods = U.newHashMap(types.size() * 2);
+
+        for (JdbcType type : types) {
+            String keyTypeName = type.getKeyType();
+
+            TypeKind keyKind = kindForName(keyTypeName);
+
+            if (keyKind == TypeKind.POJO) {
+                if (pojoMethods.containsKey(keyTypeName))
+                    throw new CacheException("Found duplicate key type [cache=" + U.maskName(cacheName) +
+                        ", keyType=" + keyTypeName + "]");
+
+                pojoMethods.put(keyTypeName, new PojoMethodsCache(keyTypeName, type.getKeyFields()));
+            }
+
+            String valTypeName = type.getValueType();
+
+            TypeKind valKind = kindForName(valTypeName);
+
+            if (valKind == TypeKind.POJO)
+                pojoMethods.put(valTypeName, new PojoMethodsCache(valTypeName, type.getValueFields()));
+        }
+
+        if (!pojoMethods.isEmpty()) {
+            Map<String, Map<String, PojoMethodsCache>> newPojosMethods = new HashMap<>(pojosMthds);
+
+            newPojosMethods.put(cacheName, pojoMethods);
+
+            pojosMthds = newPojosMethods;
+        }
+    }
+
+    /**
+     * POJO methods cache.
+     */
+    private static class PojoMethodsCache {
+        /** POJO class. */
+        private final Class<?> cls;
+
+        /** Constructor for POJO object. */
+        private Constructor ctor;
+
+        /** Cached getters for POJO object. */
+        private Map<String, Method> getters;
+
+        /** Cached setters for POJO object. */
+        private Map<String, Method> setters;
+
+        /**
+         * POJO methods cache.
+         *
+         * @param clsName Class name.
+         * @param fields Fields.
+         * @throws CacheException If failed to construct type cache.
+         */
+        private PojoMethodsCache(String clsName, JdbcTypeField[] fields) throws CacheException {
+            try {
+                cls = Class.forName(clsName);
+
+                ctor = cls.getDeclaredConstructor();
+
+                if (!ctor.isAccessible())
+                    ctor.setAccessible(true);
+            }
+            catch (ClassNotFoundException e) {
+                throw new CacheException("Failed to find class: " + clsName, e);
+            }
+            catch (NoSuchMethodException e) {
+                throw new CacheException("Failed to find default constructor for class: " + clsName, e);
+            }
+
+            setters = U.newHashMap(fields.length);
+
+            getters = U.newHashMap(fields.length);
+
+            for (JdbcTypeField field : fields) {
+                String prop = capitalFirst(field.getJavaFieldName());
+
+                try {
+                    getters.put(field.getJavaFieldName(), cls.getMethod("get" + prop));
+                }
+                catch (NoSuchMethodException ignored) {
+                    try {
+                        getters.put(field.getJavaFieldName(), cls.getMethod("is" + prop));
+                    }
+                    catch (NoSuchMethodException e) {
+                        throw new CacheException("Failed to find getter in POJO class [class=" + clsName +
+                            ", prop=" + field.getJavaFieldName() + "]", e);
+                    }
+                }
+
+                try {
+                    setters.put(field.getJavaFieldName(), cls.getMethod("set" + prop, field.getJavaFieldType()));
+                }
+                catch (NoSuchMethodException e) {
+                    throw new CacheException("Failed to find setter in POJO class [class=" + clsName +
+                        ", prop=" + field.getJavaFieldName() + "]", e);
+                }
+            }
+        }
+
+        /**
+         * Capitalizes the first character of the given string.
+         *
+         * @param str String.
+         * @return String with capitalized first character.
+         */
+        @Nullable private String capitalFirst(@Nullable String str) {
+            return str == null ? null :
+                str.isEmpty() ? "" : Character.toUpperCase(str.charAt(0)) + str.substring(1);
         }
     }
-}
\ No newline at end of file
+}
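
A key or value class consumed by the refactored store only needs to satisfy the reflection contract that PojoMethodsCache enforces above: be resolvable by name, expose an accessible no-arg constructor, and provide get/is and set accessors for every configured javaFieldName. A minimal sketch of such a class (hypothetical, not part of this patch):

    public class Person {
        private Integer id;

        private String name;

        /** Required: resolved via Class#getDeclaredConstructor() and made accessible. */
        public Person() {
            // No-op.
        }

        /** Matched for a JdbcTypeField with javaFieldName "id". */
        public Integer getId() { return id; }

        public void setId(Integer id) { this.id = id; }

        /** Matched for a JdbcTypeField with javaFieldName "name". */
        public String getName() { return name; }

        public void setName(String name) { this.name = name; }
    }

Types of kind BINARY skip this contract entirely and are reconstructed through BinaryObjectBuilder instead, as buildBinaryObject above shows.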

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java
index c90a69b..ded83ce 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreFactory.java
@@ -34,17 +34,53 @@ import org.apache.ignite.resources.SpringApplicationContextResource;
  *
  * <h2 class="header">Spring Example</h2>
  * <pre name="code" class="xml">
- *     &lt;bean id= "simpleDataSource" class="org.h2.jdbcx.JdbcDataSource"/&gt;
+ *     &lt;bean id= "myDataSource" class="org.h2.jdbcx.JdbcDataSource"/&gt;
  *
  *     &lt;bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration"&gt;
  *          ...
  *          &lt;property name="cacheConfiguration"&gt;
  *               &lt;list&gt;
  *                  &lt;bean class="org.apache.ignite.configuration.CacheConfiguration"&gt;
+ *                      &lt;property name="name" value="myCache" /&gt;
  *                      ...
  *                      &lt;property name="cacheStoreFactory"&gt;
  *                          &lt;bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory"&gt;
- *                              &lt;property name="dataSourceBean" value = "simpleDataSource" /&gt;
+ *                              &lt;property name="dataSourceBean" value="myDataSource" /&gt;
+ *                              &lt;property name="types"&gt;
+ *                                  &lt;list&gt;
+ *                                      &lt;bean class="org.apache.ignite.cache.store.jdbc.JdbcType"&gt;
+ *                                          &lt;property name="cacheName" value="myCache" /&gt;
+ *                                          &lt;property name="databaseSchema" value="MY_DB_SCHEMA" /&gt;
+ *                                          &lt;property name="databaseTable" value="PERSON" /&gt;
+ *                                          &lt;property name="keyType" value="java.lang.Integer" /&gt;
+ *                                          &lt;property name="keyFields"&gt;
+ *                                              &lt;list&gt;
+ *                                                  &lt;bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField"&gt;
+ *                                                      &lt;property name="databaseFieldType" &gt;
+ *                                                          &lt;util:constant static-field="java.sql.Types.INTEGER"/&gt;
+ *                                                      &lt;/property&gt;
+ *                                                      &lt;property name="databaseFieldName" value="ID" /&gt;
+ *                                                      &lt;property name="javaFieldType" value="java.lang.Integer" /&gt;
+ *                                                      &lt;property name="javaFieldName" value="id" /&gt;
+ *                                                  &lt;/bean&gt;
+ *                                              &lt;/list&gt;
+ *                                          &lt;/property&gt;
+ *                                          &lt;property name="valueType" value="my.company.Person" /&gt;
+ *                                          &lt;property name="valueFields"&gt;
+ *                                              &lt;list&gt;
+ *                                                  &lt;bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField"&gt;
+ *                                                      &lt;property name="databaseFieldType" &gt;
+ *                                                          &lt;util:constant static-field="java.sql.Types.VARCHAR"/&gt;
+ *                                                      &lt;/property&gt;
+ *                                                      &lt;property name="databaseFieldName" value="NAME" /&gt;
+ *                                                      &lt;property name="javaFieldType" value="java.lang.String" /&gt;
+ *                                                      &lt;property name="javaFieldName" value="name" /&gt;
+ *                                                  &lt;/bean&gt;
+ *                                              &lt;/list&gt;
+ *                                          &lt;/property&gt;
+ *                                      &lt;/bean&gt;
+ *                                  &lt;/list&gt;
+ *                              &lt;/property&gt;
  *                          &lt;/bean&gt;
  *                      &lt;/property&gt;
  *                  &lt;/bean&gt;
@@ -57,47 +93,81 @@ import org.apache.ignite.resources.SpringApplicationContextResource;
  * <br>
  * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
  */
-public class CacheJdbcPojoStoreFactory<K, V> implements Factory<CacheJdbcPojoStore<K, V>> {
+public class CacheJdbcPojoStoreFactory<K, V> implements Factory<CacheAbstractJdbcStore<K, V>> {
     /** */
     private static final long serialVersionUID = 0L;
 
+    /** Default value for write attempts. */
+    public static final int DFLT_WRITE_ATTEMPTS = 2;
+
+    /** Default batch size for put and remove operations. */
+    public static final int DFLT_BATCH_SIZE = 512;
+
+    /** Default parallel load cache minimum threshold. */
+    public static final int DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD = 512;
+
+    /** Maximum batch size for writeAll and deleteAll operations. */
+    private int batchSizw = DFLT_BATCH_SIZE;
+
     /** Name of data source bean. */
     private String dataSrcBean;
 
-    /** Data source. */
-    private transient DataSource dataSrc;
-
     /** Database dialect. */
     private JdbcDialect dialect;
 
+    /** Max workers thread count. These threads are responsible for cache loading. */
+    private int maxPoolSize = Runtime.getRuntime().availableProcessors();
+
+    /** Maximum write attempts in case of database error. */
+    private int maxWriteAttempts = DFLT_WRITE_ATTEMPTS;
+
+    /** Parallel load cache minimum threshold. If {@code 0} then load sequentially. */
+    private int parallelLoadCacheMinThreshold = DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD;
+
+    /** Hash calculator.  */
+    private JdbcTypeHasher hasher = JdbcTypeDefaultHasher.INSTANCE;
+
+    /** Types that store could process. */
+    private JdbcType[] types;
+
+    /** Data source. */
+    private transient DataSource dataSrc;
+
     /** Application context. */
     @SpringApplicationContextResource
-    private transient Object appContext;
+    private transient Object appCtx;
 
     /** {@inheritDoc} */
     @Override public CacheJdbcPojoStore<K, V> create() {
         CacheJdbcPojoStore<K, V> store = new CacheJdbcPojoStore<>();
 
+        store.setBatchSize(batchSizw);
         store.setDialect(dialect);
+        store.setMaximumPoolSize(maxPoolSize);
+        store.setMaximumWriteAttempts(maxWriteAttempts);
+        store.setParallelLoadCacheMinimumThreshold(parallelLoadCacheMinThreshold);
+        store.setTypes(types);
 
         if (dataSrc != null)
             store.setDataSource(dataSrc);
-        else if (dataSrcBean != null) {
-            if (appContext == null)
-                throw new IgniteException("Spring application context resource is not injected.");
+        else {
+            if (dataSrcBean != null) {
+                if (appCtx == null)
+                    throw new IgniteException("Spring application context resource is not injected.");
 
-            IgniteSpringHelper spring;
+                IgniteSpringHelper spring;
 
-            try {
-                spring = IgniteComponentType.SPRING.create(false);
+                try {
+                    spring = IgniteComponentType.SPRING.create(false);
 
-                DataSource data = spring.loadBeanFromAppContext(appContext, dataSrcBean);
+                    DataSource data = spring.loadBeanFromAppContext(appCtx, dataSrcBean);
 
-                store.setDataSource(data);
-            }
-            catch (Exception e) {
-                throw new IgniteException("Failed to load bean in application context [beanName=" + dataSrcBean +
-                    ", igniteConfig=" + appContext + ']', e);
+                    store.setDataSource(data);
+                }
+                catch (Exception e) {
+                    throw new IgniteException("Failed to load bean in application context [beanName=" + dataSrcBean +
+                        ", igniteConfig=" + appCtx + ']', e);
+                }
             }
         }
 
@@ -105,43 +175,188 @@ public class CacheJdbcPojoStoreFactory<K, V> implements Factory<CacheJdbcPojoSto
     }
 
     /**
-     * Sets name of the data source bean.
+     * Sets data source. Data source should be fully configured and ready-to-use.
      *
-     * @param dataSrcBean Data source bean name.
+     * @param dataSrc Data source.
      * @return {@code This} for chaining.
      * @see CacheJdbcPojoStore#setDataSource(DataSource)
      */
-    public CacheJdbcPojoStoreFactory<K, V> setDataSourceBean(String dataSrcBean) {
-        this.dataSrcBean = dataSrcBean;
+    public CacheJdbcPojoStoreFactory<K, V> setDataSource(DataSource dataSrc) {
+        this.dataSrc = dataSrc;
 
         return this;
     }
 
     /**
-     * Sets data source. Data source should be fully configured and ready-to-use.
+     * Get maximum batch size for write and delete operations.
      *
-     * @param dataSrc Data source.
+     * @return Maximum batch size.
+     */
+    public int getBatchSize() {
+        return batchSizw;
+    }
+
+    /**
+     * Set maximum batch size for write and delete operations.
+     *
+     * @param batchSize Maximum batch size.
      * @return {@code This} for chaining.
-     * @see CacheJdbcPojoStore#setDataSource(DataSource)
      */
-    public CacheJdbcPojoStoreFactory<K, V> setDataSource(DataSource dataSrc) {
-        this.dataSrc = dataSrc;
+    public CacheJdbcPojoStoreFactory setBatchSize(int batchSize) {
+        this.batchSizw = batchSize;
+
+        return this;
+    }
+
+    /**
+     * Gets name of the data source bean.
+     *
+     * @return Data source bean name.
+     */
+    public String getDataSourceBean() {
+        return dataSrcBean;
+    }
+
+    /**
+     * Sets name of the data source bean.
+     *
+     * @param dataSrcBean Data source bean name.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setDataSourceBean(String dataSrcBean) {
+        this.dataSrcBean = dataSrcBean;
 
         return this;
     }
 
     /**
+     * Get database dialect.
+     *
+     * @return Database dialect.
+     */
+    public JdbcDialect getDialect() {
+        return dialect;
+    }
+
+    /**
      * Set database dialect.
      *
      * @param dialect Database dialect.
-     * @see CacheJdbcPojoStore#setDialect(JdbcDialect)
+     * @return {@code This} for chaining.
      */
-    public void setDialect(JdbcDialect dialect) {
+    public CacheJdbcPojoStoreFactory setDialect(JdbcDialect dialect) {
         this.dialect = dialect;
+
+        return this;
+    }
+
+    /**
+     * Get maximum workers thread count. These threads are responsible for query execution.
+     *
+     * @return Maximum workers thread count.
+     */
+    public int getMaximumPoolSize() {
+        return maxPoolSize;
+    }
+
+    /**
+     * Set maximum workers thread count. These threads are responsible for query execution.
+     *
+     * @param maxPoolSize Max workers thread count.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setMaximumPoolSize(int maxPoolSize) {
+        this.maxPoolSize = maxPoolSize;
+
+        return this;
+    }
+
+    /**
+     * Gets maximum number of write attempts in case of database error.
+     *
+     * @return Maximum number of write attempts.
+     */
+    public int getMaximumWriteAttempts() {
+        return maxWriteAttempts;
+    }
+
+    /**
+     * Sets maximum number of write attempts in case of database error.
+     *
+     * @param maxWrtAttempts Number of write attempts.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setMaximumWriteAttempts(int maxWrtAttempts) {
+        this.maxWriteAttempts = maxWrtAttempts;
+
+        return this;
+    }
+
+    /**
+     * Parallel load cache minimum row count threshold.
+     *
+     * @return Parallel load cache minimum row count threshold. If {@code 0} then load sequentially.
+     */
+    public int getParallelLoadCacheMinimumThreshold() {
+        return parallelLoadCacheMinThreshold;
+    }
+
+    /**
+     * Parallel load cache minimum row count threshold.
+     *
+     * @param parallelLoadCacheMinThreshold Minimum row count threshold. If {@code 0} then load sequentially.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setParallelLoadCacheMinimumThreshold(int parallelLoadCacheMinThreshold) {
+        this.parallelLoadCacheMinThreshold = parallelLoadCacheMinThreshold;
+
+        return this;
+    }
+
+    /**
+     * Gets types known by store.
+     *
+     * @return Types known by store.
+     */
+    public JdbcType[] getTypes() {
+        return types;
+    }
+
+    /**
+     * Sets types known by store.
+     *
+     * @param types Types that the store should process.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setTypes(JdbcType... types) {
+        this.types = types;
+
+        return this;
+    }
+
+    /**
+     * Gets hash code calculator.
+     *
+     * @return Hash code calculator.
+     */
+    public JdbcTypeHasher getHasher() {
+        return hasher;
+    }
+
+    /**
+     * Sets hash code calculator.
+     *
+     * @param hasher Hash code calculator.
+     * @return {@code This} for chaining.
+     */
+    public CacheJdbcPojoStoreFactory setHasher(JdbcTypeHasher hasher) {
+        this.hasher = hasher;
+
+        return this;
     }
 
     /** {@inheritDoc} */
     @Override public String toString() {
         return S.toString(CacheJdbcPojoStoreFactory.class, this);
     }
-}
\ No newline at end of file
+}
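
The XML javadoc above has a direct programmatic counterpart. A minimal sketch of wiring the factory into a cache configuration, assuming a personType() helper that builds the JdbcType mapping (a full sketch of that helper follows the new JdbcType class below) and the hypothetical "myDataSource"/"myCache" names from the javadoc:

    CacheJdbcPojoStoreFactory<Integer, Person> storeFactory = new CacheJdbcPojoStoreFactory<>();

    storeFactory.setDataSourceBean("myDataSource");
    storeFactory.setDialect(new H2Dialect());
    storeFactory.setBatchSize(1024);
    storeFactory.setMaximumPoolSize(8);
    storeFactory.setMaximumWriteAttempts(3);
    storeFactory.setParallelLoadCacheMinimumThreshold(0); // 0 means load sequentially.
    storeFactory.setTypes(personType());

    CacheConfiguration<Integer, Person> ccfg = new CacheConfiguration<>("myCache");

    ccfg.setCacheStoreFactory(storeFactory);
    ccfg.setReadThrough(true);
    ccfg.setWriteThrough(true);

Note that the factory is now declared as Factory<CacheAbstractJdbcStore<K, V>>, and the tuning knobs above (batch size, pool size, write attempts, parallel load threshold) are applied to the store instance inside create().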

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java
new file mode 100644
index 0000000..2107240
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcType.java
@@ -0,0 +1,255 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import java.io.Serializable;
+import org.apache.ignite.internal.util.tostring.GridToStringInclude;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Description of a type that can be stored in the database by the store.
+ */
+public class JdbcType implements Serializable {
+    /** */
+    private static final long serialVersionUID = 0L;
+
+    /** Cache name. */
+    private String cacheName;
+
+    /** Schema name in database. */
+    private String dbSchema;
+
+    /** Table name in database. */
+    private String dbTbl;
+
+    /** Key class used to store key in cache. */
+    private String keyType;
+
+    /** List of fields descriptors for key object. */
+    @GridToStringInclude
+    private JdbcTypeField[] keyFields;
+
+    /** Value class used to store value in cache. */
+    private String valType;
+
+    /** List of fields descriptors for value object. */
+    @GridToStringInclude
+    private JdbcTypeField[] valFlds;
+
+    /** Custom type hasher. */
+    private JdbcTypeHasher hasher;
+
+    /**
+     * Empty constructor (all values are initialized to their defaults).
+     */
+    public JdbcType() {
+        /* No-op. */
+    }
+
+    /**
+     * Copy constructor.
+     *
+     * @param type Type to copy.
+     */
+    public JdbcType(JdbcType type) {
+        cacheName = type.getCacheName();
+
+        dbSchema = type.getDatabaseSchema();
+        dbTbl = type.getDatabaseTable();
+
+        keyType = type.getKeyType();
+        keyFields = type.getKeyFields();
+
+        valType = type.getValueType();
+        valFlds = type.getValueFields();
+    }
+
+    /**
+     * Gets associated cache name.
+     *
+     * @return Cache name.
+     */
+    public String getCacheName() {
+        return cacheName;
+    }
+
+    /**
+     * Sets associated cache name.
+     *
+     * @param cacheName Cache name.
+     */
+    public JdbcType setCacheName(String cacheName) {
+        this.cacheName = cacheName;
+
+        return this;
+    }
+
+    /**
+     * Gets database schema name.
+     *
+     * @return Schema name.
+     */
+    public String getDatabaseSchema() {
+        return dbSchema;
+    }
+
+    /**
+     * Sets database schema name.
+     *
+     * @param dbSchema Schema name.
+     */
+    public JdbcType setDatabaseSchema(String dbSchema) {
+        this.dbSchema = dbSchema;
+
+        return this;
+    }
+
+    /**
+     * Gets table name in database.
+     *
+     * @return Table name in database.
+     */
+    public String getDatabaseTable() {
+        return dbTbl;
+    }
+
+    /**
+     * Sets table name in database.
+     *
+     * @param dbTbl Table name in database.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setDatabaseTable(String dbTbl) {
+        this.dbTbl = dbTbl;
+
+        return this;
+    }
+
+    /**
+     * Gets key type.
+     *
+     * @return Key type.
+     */
+    public String getKeyType() {
+        return keyType;
+    }
+
+    /**
+     * Sets key type.
+     *
+     * @param keyType Key type.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setKeyType(String keyType) {
+        this.keyType = keyType;
+
+        return this;
+    }
+
+    /**
+     * Sets key type.
+     *
+     * @param cls Key type class.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setKeyType(Class<?> cls) {
+        setKeyType(cls.getName());
+
+        return this;
+    }
+
+    /**
+     * Gets value type.
+     *
+     * @return Value type.
+     */
+    public String getValueType() {
+        return valType;
+    }
+
+    /**
+     * Sets value type.
+     *
+     * @param valType Value type.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setValueType(String valType) {
+        this.valType = valType;
+
+        return this;
+    }
+
+    /**
+     * Sets value type.
+     *
+     * @param cls Value type class.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setValueType(Class<?> cls) {
+        setValueType(cls.getName());
+
+        return this;
+    }
+
+    /**
+     * Gets optional persistent key fields (needed only if {@link CacheJdbcPojoStore} is used).
+     *
+     * @return Persistent key fields.
+     */
+    public JdbcTypeField[] getKeyFields() {
+        return keyFields;
+    }
+
+    /**
+     * Sets optional persistent key fields (needed only if {@link CacheJdbcPojoStore} is used).
+     *
+     * @param keyFlds Persistent key fields.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setKeyFields(JdbcTypeField... keyFlds) {
+        this.keyFields = keyFlds;
+
+        return this;
+    }
+
+    /**
+     * Gets optional persistent value fields (needed only if {@link CacheJdbcPojoStore} is used).
+     *
+     * @return Persistent value fields.
+     */
+    public JdbcTypeField[] getValueFields() {
+        return valFlds;
+    }
+
+    /**
+     * Sets optional persistent value fields (needed only if {@link CacheJdbcPojoStore} is used).
+     *
+     * @param valFlds Persistent value fields.
+     * @return {@code this} for chaining.
+     */
+    public JdbcType setValueFields(JdbcTypeField... valFlds) {
+        this.valFlds = valFlds;
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(JdbcType.class, this);
+    }
+}
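
Since every setter returns {@code this}, a JdbcType can be assembled fluently. The personType() helper assumed in the factory sketch earlier could look like this (schema, table, column and cache names are hypothetical):

    private static JdbcType personType() {
        return new JdbcType()
            .setCacheName("myCache")
            .setDatabaseSchema("MY_DB_SCHEMA")
            .setDatabaseTable("PERSON")
            .setKeyType(Integer.class)
            .setKeyFields(new JdbcTypeField(java.sql.Types.INTEGER, "ID", Integer.class, "id"))
            .setValueType("my.company.Person")
            .setValueFields(new JdbcTypeField(java.sql.Types.VARCHAR, "NAME", String.class, "name"));
    }

The copy constructor makes it cheap to register the same table mapping under another cache name, e.g. new JdbcType(personType()).setCacheName("anotherCache").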

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeDefaultHasher.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeDefaultHasher.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeDefaultHasher.java
new file mode 100644
index 0000000..3baad3b
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeDefaultHasher.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import java.util.Collection;
+
+/**
+ * Default implementation of {@link JdbcTypeHasher}.
+ *
+ * This implementation ignores type and field names.
+ */
+public class JdbcTypeDefaultHasher implements JdbcTypeHasher {
+    /** */
+    private static final long serialVersionUID = 0L;
+
+    /** Singleton instance to use. */
+    public static final JdbcTypeHasher INSTANCE = new JdbcTypeDefaultHasher();
+
+    /** {@inheritDoc} */
+    @Override public int hashCode(Collection<?> values) {
+        int hash = 0;
+
+        for (Object val : values)
+            hash = 31 * hash + (val != null ? val.hashCode() : 0);
+
+        return hash;
+    }
+}
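
The accumulation above is the usual 31-based combination, so the result for a given field order is easy to predict. A small usage sketch (the hash is order-sensitive, so values must always be passed in a stable order):

    int hash = JdbcTypeDefaultHasher.INSTANCE.hashCode(java.util.Arrays.asList(1, "John"));
    // hash == 31 * (31 * 0 + Integer.valueOf(1).hashCode()) + "John".hashCode()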

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeField.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeField.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeField.java
new file mode 100644
index 0000000..3396b81
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeField.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import java.io.Serializable;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Description of how a field is declared in the database and in the cache.
+ */
+public class JdbcTypeField implements Serializable {
+    /** */
+    private static final long serialVersionUID = 0L;
+
+    /** Field JDBC type in database. */
+    private int dbFldType;
+
+    /** Field name in database. */
+    private String dbFldName;
+
+    /** Field java type. */
+    private Class<?> javaFldType;
+
+    /** Field name in java object. */
+    private String javaFldName;
+
+    /**
+     * Default constructor.
+     */
+    public JdbcTypeField() {
+        // No-op.
+    }
+
+    /**
+     * Full constructor.
+     *
+     * @param dbFldType Field JDBC type in database.
+     * @param dbFldName Field name in database.
+     * @param javaFldType Field java type.
+     * @param javaFldName Field name in java object.
+     */
+    public JdbcTypeField(int dbFldType, String dbFldName, Class<?> javaFldType, String javaFldName) {
+        this.dbFldType = dbFldType;
+        this.dbFldName = dbFldName;
+        this.javaFldType = javaFldType;
+        this.javaFldName = javaFldName;
+    }
+
+    /**
+     * Copy constructor.
+     *
+     * @param field Field to copy.
+     */
+    public JdbcTypeField(JdbcTypeField field) {
+        this(field.getDatabaseFieldType(), field.getDatabaseFieldName(),
+            field.getJavaFieldType(), field.getJavaFieldName());
+    }
+
+    /**
+     * @return Column JDBC type in database.
+     */
+    public int getDatabaseFieldType() {
+        return dbFldType;
+    }
+
+    /**
+     * @param dbFldType Column JDBC type in database.
+     * @return {@code this} for chaining.
+     */
+    public JdbcTypeField setDatabaseFieldType(int dbFldType) {
+        this.dbFldType = dbFldType;
+
+        return this;
+    }
+
+
+    /**
+     * @return Column name in database.
+     */
+    public String getDatabaseFieldName() {
+        return dbFldName;
+    }
+
+    /**
+     * @param dbFldName Column name in database.
+     * @return {@code this} for chaining.
+     */
+    public JdbcTypeField setDatabaseFieldName(String dbFldName) {
+        this.dbFldName = dbFldName;
+
+        return this;
+    }
+
+    /**
+     * @return Field java type.
+     */
+    public Class<?> getJavaFieldType() {
+        return javaFldType;
+    }
+
+    /**
+     * @param javaFldType Corresponding java type.
+     * @return {@code this} for chaining.
+     */
+    public JdbcTypeField setJavaFieldType(Class<?> javaFldType) {
+        this.javaFldType = javaFldType;
+
+        return this;
+    }
+
+    /**
+     * @return Field name in java object.
+     */
+    public String getJavaFieldName() {
+        return javaFldName;
+    }
+
+    /**
+     * @param javaFldName Field name in java object.
+     * @return {@code this} for chaining.
+     */
+    public JdbcTypeField setJavaFieldName(String javaFldName) {
+        this.javaFldName = javaFldName;
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean equals(Object o) {
+        if (this == o)
+            return true;
+
+        if (!(o instanceof JdbcTypeField))
+            return false;
+
+        JdbcTypeField that = (JdbcTypeField)o;
+
+        return dbFldType == that.dbFldType && dbFldName.equals(that.dbFldName) &&
+            javaFldType == that.javaFldType && javaFldName.equals(that.javaFldName);
+    }
+
+    /** {@inheritDoc} */
+    @Override public int hashCode() {
+        int res = dbFldType;
+        res = 31 * res + dbFldName.hashCode();
+
+        res = 31 * res + javaFldType.hashCode();
+        res = 31 * res + javaFldName.hashCode();
+
+        return res;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(JdbcTypeField.class, this);
+    }
+}
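
The two construction styles added here are equivalent; a hedged sketch with hypothetical column and property names:

    JdbcTypeField idFld = new JdbcTypeField(java.sql.Types.INTEGER, "ID", Integer.class, "id");

    // The same descriptor built with the fluent setters.
    JdbcTypeField idFld2 = new JdbcTypeField()
        .setDatabaseFieldType(java.sql.Types.INTEGER)
        .setDatabaseFieldName("ID")
        .setJavaFieldType(Integer.class)
        .setJavaFieldName("id");

    assert idFld.equals(idFld2); // equals() compares all four attributes.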

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeHasher.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeHasher.java b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeHasher.java
new file mode 100644
index 0000000..9d1fcea
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/store/jdbc/JdbcTypeHasher.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import java.io.Serializable;
+import java.util.Collection;
+
+/**
+ * API for implementing custom hashing logic for binary objects on server side.
+ */
+public interface JdbcTypeHasher extends Serializable {
+    /**
+     * Calculate hash code for specified object and fields.
+     *
+     * @param values Collection of values that should participate in hash code calculation.
+     * @return Hash code.
+     */
+    public int hashCode(Collection<?> values);
+}
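
Implementations are plugged in through CacheJdbcPojoStoreFactory#setHasher (shown above) and receive the loaded column values in field-declaration order. A minimal custom implementation, sketched here purely as an illustration (the class name is hypothetical):

    /** Hypothetical hasher that delegates to java.util.Objects#hash. */
    public class ObjectsHashJdbcTypeHasher implements JdbcTypeHasher {
        /** */
        private static final long serialVersionUID = 0L;

        /** {@inheritDoc} */
        @Override public int hashCode(java.util.Collection<?> values) {
            return java.util.Objects.hash(values.toArray());
        }
    }

    // Usage: storeFactory.setHasher(new ObjectsHashJdbcTypeHasher());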

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java
index fbe54e0..d0eeeb1 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java
@@ -260,6 +260,10 @@ public class GridQueryProcessor extends GridProcessorAdapter {
                     if (F.isEmpty(meta.getValueType()))
                         throw new IgniteCheckedException("Value type is not set: " + meta);
 
+                    if (meta.getQueryFields().isEmpty() && meta.getAscendingFields().isEmpty() &&
+                        meta.getDescendingFields().isEmpty() && meta.getGroups().isEmpty())
+                        continue;
+
                     TypeDescriptor desc = new TypeDescriptor();
 
                     // Key and value classes still can be available if they are primitive or JDK part.
@@ -2314,4 +2318,4 @@ public class GridQueryProcessor extends GridProcessorAdapter {
     private enum IndexType {
         ASC, DESC, TEXT
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCache.java b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCache.java
index 6171ff8..1bbcf2f 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCache.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCache.java
@@ -229,7 +229,9 @@ public class VisorCache implements Serializable {
     protected void estimateMemorySize(IgniteEx ignite, GridCacheAdapter ca, int sample) throws IgniteCheckedException {
         int size = ca.size();
 
-        Set<GridCacheEntryEx> set = ca.map().entries0();
+        Set<GridCacheEntryEx> set = ca.context().isNear()
+            ? ((GridNearCacheAdapter)ca).dht().map().entries0()
+            : ca.map().entries0();
 
         long memSz = 0;
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java
new file mode 100644
index 0000000..0f40bd9
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreAbstractSelfTest.java
@@ -0,0 +1,395 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.store.jdbc.dialect.H2Dialect;
+import org.apache.ignite.cache.store.jdbc.model.Person;
+import org.apache.ignite.cache.store.jdbc.model.PersonKey;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.ConnectorConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.marshaller.Marshaller;
+import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.h2.jdbcx.JdbcConnectionPool;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
+import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
+import static org.apache.ignite.cache.CacheMode.PARTITIONED;
+
+/**
+ * Abstract class for {@code CacheJdbcPojoStore} tests.
+ */
+public abstract class CacheJdbcPojoStoreAbstractSelfTest extends GridCommonAbstractTest {
+    /** IP finder. */
+    protected static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);
+
+    /** DB connection URL. */
+    protected static final String DFLT_CONN_URL = "jdbc:h2:mem:TestDatabase;DB_CLOSE_DELAY=-1";
+
+    /** Organization count. */
+    protected static final int ORGANIZATION_CNT = 1000;
+
+    /** Person count. */
+    protected static final int PERSON_CNT = 100000;
+
+    /** Flag indicating that tests should use transactional cache. */
+    protected static boolean transactional;
+
+    /** Flag indicating that tests should use built-in classes, like java.lang.Integer, for keys. */
+    protected static boolean builtinKeys;
+
+    /** Flag indicating that key classes are not available on class path. */
+    protected static boolean noKeyClasses;
+
+    /** Flag indicating that value classes are not available on class path. */
+    protected static boolean noValClasses;
+
+    /**
+     * @return Connection to test in-memory H2 database.
+     * @throws SQLException If failed to connect to database.
+     */
+    protected Connection getConnection() throws SQLException {
+        return DriverManager.getConnection(DFLT_CONN_URL, "sa", "");
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        Connection conn = getConnection();
+
+        Statement stmt = conn.createStatement();
+
+        stmt.executeUpdate("DROP TABLE IF EXISTS Organization");
+        stmt.executeUpdate("DROP TABLE IF EXISTS Person");
+
+        stmt.executeUpdate("CREATE TABLE Organization (" +
+            " id INTEGER PRIMARY KEY," +
+            " name VARCHAR(50)," +
+            " city VARCHAR(50))");
+
+        stmt.executeUpdate("CREATE TABLE Person (" +
+            " id INTEGER PRIMARY KEY," +
+            " org_id INTEGER," +
+            " name VARCHAR(50))");
+
+        conn.commit();
+
+        U.closeQuiet(stmt);
+
+        fillSampleDatabase(conn);
+
+        U.closeQuiet(conn);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTest() throws Exception {
+        stopAllGrids();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
+        IgniteConfiguration cfg = super.getConfiguration(gridName);
+
+        TcpDiscoverySpi disco = new TcpDiscoverySpi();
+
+        disco.setIpFinder(IP_FINDER);
+
+        cfg.setDiscoverySpi(disco);
+
+        cfg.setCacheConfiguration(cacheConfiguration());
+
+        cfg.setMarshaller(marshaller());
+
+        ConnectorConfiguration connCfg = new ConnectorConfiguration();
+        cfg.setConnectorConfiguration(connCfg);
+
+        return cfg;
+    }
+
+    /**
+     * @return Marshaller to be used in test.
+     */
+    protected abstract Marshaller marshaller();
+
+    /**
+     * @return Types to be used in test.
+     */
+    protected JdbcType[] storeTypes() {
+        JdbcType[] storeTypes = new JdbcType[2];
+
+        storeTypes[0] = new JdbcType();
+        storeTypes[0].setDatabaseSchema("PUBLIC");
+        storeTypes[0].setDatabaseTable("ORGANIZATION");
+
+        if (builtinKeys) {
+            storeTypes[0].setKeyType("java.lang.Integer");
+            storeTypes[0].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
+        }
+        else {
+            storeTypes[0].setKeyType("org.apache.ignite.cache.store.jdbc.model.OrganizationKey" + (noKeyClasses ? "1" : ""));
+            storeTypes[0].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
+        }
+
+        storeTypes[0].setValueType("org.apache.ignite.cache.store.jdbc.model.Organization" + (noValClasses ? "1" : ""));
+        storeTypes[0].setValueFields(
+            new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"),
+            new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"),
+            new JdbcTypeField(Types.VARCHAR, "CITY", String.class, "city"));
+
+        storeTypes[1] = new JdbcType();
+        storeTypes[1].setDatabaseSchema("PUBLIC");
+        storeTypes[1].setDatabaseTable("PERSON");
+
+        if (builtinKeys) {
+            storeTypes[1].setKeyType("java.lang.Long");
+            storeTypes[1].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Long.class, "id"));
+        }
+        else {
+            storeTypes[1].setKeyType("org.apache.ignite.cache.store.jdbc.model.PersonKey" + (noKeyClasses ? "1" : ""));
+            storeTypes[1].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
+        }
+
+        storeTypes[1].setValueType("org.apache.ignite.cache.store.jdbc.model.Person" + (noValClasses ? "1" : ""));
+        storeTypes[1].setValueFields(
+            new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"),
+            new JdbcTypeField(Types.INTEGER, "ORG_ID", Integer.class, "orgId"),
+            new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"));
+
+        return storeTypes;
+    }
+
+    /**
+     * @return Cache configuration for test.
+     * @throws Exception If failed to create cache configuration.
+     */
+    protected CacheConfiguration cacheConfiguration() throws Exception {
+        CacheConfiguration cc = defaultCacheConfiguration();
+
+        cc.setCacheMode(PARTITIONED);
+        cc.setAtomicityMode(transactional ? TRANSACTIONAL : ATOMIC);
+        cc.setSwapEnabled(false);
+        cc.setWriteBehindEnabled(false);
+
+        CacheJdbcPojoStoreFactory<Object, Object> storeFactory = new CacheJdbcPojoStoreFactory<>();
+        storeFactory.setDialect(new H2Dialect());
+        storeFactory.setTypes(storeTypes());
+        storeFactory.setDataSource(JdbcConnectionPool.create(DFLT_CONN_URL, "sa", "")); // H2 DataSource
+
+        cc.setCacheStoreFactory(storeFactory);
+        cc.setReadThrough(true);
+        cc.setWriteThrough(true);
+        cc.setLoadPreviousValue(true);
+
+        return cc;
+    }
+
+    /**
+     * Fill in-memory database with sample data.
+     *
+     * @param conn Connection to database.
+     * @throws SQLException If failed to fill database with sample data.
+     */
+    protected void fillSampleDatabase(Connection conn) throws SQLException {
+        info("Start to fill sample database...");
+
+        PreparedStatement orgStmt = conn.prepareStatement("INSERT INTO Organization(id, name, city) VALUES (?, ?, ?)");
+
+        for (int i = 0; i < ORGANIZATION_CNT; i++) {
+            orgStmt.setInt(1, i);
+            orgStmt.setString(2, "name" + i);
+            orgStmt.setString(3, "city" + i % 10);
+
+            orgStmt.addBatch();
+        }
+
+        orgStmt.executeBatch();
+
+        U.closeQuiet(orgStmt);
+
+        conn.commit();
+
+        PreparedStatement prnStmt = conn.prepareStatement("INSERT INTO Person(id, org_id, name) VALUES (?, ?, ?)");
+
+        for (int i = 0; i < PERSON_CNT; i++) {
+            prnStmt.setInt(1, i);
+            prnStmt.setInt(2, i % 100);
+            prnStmt.setString(3, "name" + i);
+
+            prnStmt.addBatch();
+        }
+
+        prnStmt.executeBatch();
+
+        conn.commit();
+
+        U.closeQuiet(prnStmt);
+
+        info("Sample database prepared.");
+    }
+
+    /**
+     * Start test grid with specified options.
+     *
+     * @param builtin {@code True} if keys are built-in Java types.
+     * @param noKeyCls {@code True} if key classes are not on class path.
+     * @param noValCls {@code True} if value classes are not on class path.
+     * @param trn {@code True} if cache should be started in transactional mode.
+     * @throws Exception If failed.
+     */
+    protected void startTestGrid(boolean builtin, boolean noKeyCls, boolean noValCls, boolean trn) throws Exception {
+        builtinKeys = builtin;
+        noKeyClasses = noKeyCls;
+        noValClasses = noValCls;
+        transactional = trn;
+
+        startGrid();
+    }
+
+    /**
+     * Check that data was loaded correctly.
+     */
+    protected void checkCacheContent() {
+        IgniteCache<Object, Object> c1 = grid().cache(null);
+
+        c1.loadCache(null);
+
+        assertEquals(ORGANIZATION_CNT + PERSON_CNT, c1.size());
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCache() throws Exception {
+        startTestGrid(false, false, false, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheTx() throws Exception {
+        startTestGrid(false, false, false, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCachePrimitiveKeys() throws Exception {
+        startTestGrid(true, false, false, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCachePrimitiveKeysTx() throws Exception {
+        startTestGrid(true, false, false, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * Check that entries put in cache are stored in the database.
+     *
+     * @throws Exception If failed.
+     */
+    private void checkPut() throws Exception {
+        IgniteCache<PersonKey, Person> c1 = grid().cache(null);
+
+        Connection conn = getConnection();
+        try {
+            PreparedStatement stmt = conn.prepareStatement("SELECT ID, ORG_ID, NAME FROM PERSON WHERE ID = ?");
+
+            stmt.setInt(1, -1);
+
+            ResultSet rs = stmt.executeQuery();
+
+            assertFalse("Unexpected non-empty result set", rs.next());
+
+            U.closeQuiet(rs);
+
+            // Test put-insert.
+            PersonKey key = new PersonKey(-1);
+
+            c1.put(key, new Person(-1, -2, "Person-to-test-put-insert", 999));
+
+            rs = stmt.executeQuery();
+
+            assertTrue("Unexpected empty result set", rs.next());
+
+            assertEquals(-1, rs.getInt(1));
+            assertEquals(-2, rs.getInt(2));
+            assertEquals("Person-to-test-put-insert", rs.getString(3));
+
+            assertFalse("Unexpected extra data in result set", rs.next());
+
+            U.closeQuiet(rs);
+
+            // Test put-update.
+            c1.put(key, new Person(-1, -3, "Person-to-test-put-update", 999));
+
+            rs = stmt.executeQuery();
+
+            assertTrue("Unexpected empty result set", rs.next());
+
+            assertEquals(-1, rs.getInt(1));
+            assertEquals(-3, rs.getInt(2));
+            assertEquals("Person-to-test-put-update", rs.getString(3));
+
+            assertFalse("Unexpected extra data in result set", rs.next());
+
+            U.closeQuiet(rs);
+        }
+        finally {
+            U.closeQuiet(conn);
+        }
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testPut() throws Exception {
+        startTestGrid(false, false, false, false);
+
+        checkPut();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testPutTx() throws Exception {
+        startTestGrid(false, false, false, true);
+
+        checkPut();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java
new file mode 100644
index 0000000..f40f7d7
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreOptimizedMarshallerSelfTest.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import org.apache.ignite.marshaller.Marshaller;
+import org.apache.ignite.marshaller.optimized.OptimizedMarshaller;
+
+/**
+ * Tests for {@code CacheJdbcPojoStore} with {@code OptimizedMarshaller}.
+ */
+public class CacheJdbcPojoStoreOptimizedMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest {
+    /** {@inheritDoc} */
+    @Override protected Marshaller marshaller() {
+        return new OptimizedMarshaller();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
new file mode 100644
index 0000000..39504b1
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStorePortableMarshallerSelfTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store.jdbc;
+
+import org.apache.ignite.marshaller.Marshaller;
+import org.apache.ignite.marshaller.portable.BinaryMarshaller;
+
+/**
+ * Tests for {@code CacheJdbcPojoStore} with {@code BinaryMarshaller}.
+ */
+public class CacheJdbcPojoStorePortableMarshallerSelfTest extends CacheJdbcPojoStoreAbstractSelfTest {
+    /** {@inheritDoc} */
+    @Override protected Marshaller marshaller() {
+        return new BinaryMarshaller();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyClasses() throws Exception {
+        startTestGrid(false, true, false, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyClassesTx() throws Exception {
+        startTestGrid(false, true, false, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoValueClasses() throws Exception {
+        startTestGrid(false, false, true, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoValueClassesTx() throws Exception {
+        startTestGrid(false, false, true, true);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyAndValueClasses() throws Exception {
+        startTestGrid(false, true, true, false);
+
+        checkCacheContent();
+    }
+
+    /**
+     * @throws Exception If failed.
+     */
+    public void testLoadCacheNoKeyAndValueClassesTx() throws Exception {
+        startTestGrid(false, true, true, true);
+
+        checkCacheContent();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreTest.java
index 0ad2cad..d8f75d3 100644
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcPojoStoreTest.java
@@ -17,23 +17,19 @@
 
 package org.apache.ignite.cache.store.jdbc;
 
-import java.net.MalformedURLException;
-import java.net.URL;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.Timestamp;
+import java.sql.Types;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.UUID;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import javax.cache.integration.CacheWriterException;
-import org.apache.ignite.IgniteException;
-import org.apache.ignite.cache.CacheTypeMetadata;
-import org.apache.ignite.cache.store.jdbc.dialect.BasicJdbcDialect;
-import org.apache.ignite.cache.store.jdbc.dialect.JdbcDialect;
+
+import org.apache.ignite.cache.store.jdbc.dialect.H2Dialect;
 import org.apache.ignite.cache.store.jdbc.model.Organization;
 import org.apache.ignite.cache.store.jdbc.model.OrganizationKey;
 import org.apache.ignite.cache.store.jdbc.model.Person;
@@ -41,16 +37,11 @@ import org.apache.ignite.cache.store.jdbc.model.PersonComplexKey;
 import org.apache.ignite.cache.store.jdbc.model.PersonKey;
 import org.apache.ignite.internal.processors.cache.CacheEntryImpl;
 import org.apache.ignite.internal.util.typedef.CI2;
-import org.apache.ignite.internal.util.typedef.X;
 import org.apache.ignite.internal.util.typedef.internal.U;
 import org.apache.ignite.lang.IgniteBiInClosure;
 import org.apache.ignite.testframework.GridTestUtils;
 import org.apache.ignite.testframework.junits.cache.GridAbstractCacheStoreSelfTest;
 import org.h2.jdbcx.JdbcConnectionPool;
-import org.springframework.beans.BeansException;
-import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
-import org.springframework.context.support.GenericApplicationContext;
-import org.springframework.core.io.UrlResource;
 
 /**
  * Class for {@code PojoCacheStore} tests.
@@ -59,9 +50,6 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
     /** DB connection URL. */
     private static final String DFLT_CONN_URL = "jdbc:h2:mem:autoCacheStore;DB_CLOSE_DELAY=-1";
 
-    /** Default config with mapping. */
-    private static final String DFLT_MAPPING_CONFIG = "modules/core/src/test/config/store/jdbc/ignite-type-metadata.xml";
-
     /** Organization count. */
     protected static final int ORGANIZATION_CNT = 1000;
 
@@ -77,71 +65,86 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
 
     /** {@inheritDoc} */
     @Override protected CacheJdbcPojoStore<Object, Object> store() {
-        CacheJdbcPojoStore<Object, Object> store = new CacheJdbcPojoStore<>();
-
-//        PGPoolingDataSource ds = new PGPoolingDataSource();
-//        ds.setUser("postgres");
-//        ds.setPassword("postgres");
-//        ds.setServerName("ip");
-//        ds.setDatabaseName("postgres");
-//        store.setDataSource(ds);
-
-//        MysqlDataSource ds = new MysqlDataSource();
-//        ds.setURL("jdbc:mysql://ip:port/dbname");
-//        ds.setUser("mysql");
-//        ds.setPassword("mysql");
-
+        CacheJdbcPojoStoreFactory<Object, Object> storeFactory = new CacheJdbcPojoStoreFactory<>();
+
+        JdbcType[] storeTypes = new JdbcType[6];
+
+        storeTypes[0] = new JdbcType();
+        storeTypes[0].setDatabaseSchema("PUBLIC");
+        storeTypes[0].setDatabaseTable("ORGANIZATION");
+        storeTypes[0].setKeyType("org.apache.ignite.cache.store.jdbc.model.OrganizationKey");
+        storeTypes[0].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
+
+        storeTypes[0].setValueType("org.apache.ignite.cache.store.jdbc.model.Organization");
+        storeTypes[0].setValueFields(
+            new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"),
+            new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"),
+            new JdbcTypeField(Types.VARCHAR, "CITY", String.class, "city"));
+
+        storeTypes[1] = new JdbcType();
+        storeTypes[1].setDatabaseSchema("PUBLIC");
+        storeTypes[1].setDatabaseTable("PERSON");
+        storeTypes[1].setKeyType("org.apache.ignite.cache.store.jdbc.model.PersonKey");
+        storeTypes[1].setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
+
+        storeTypes[1].setValueType("org.apache.ignite.cache.store.jdbc.model.Person");
+        storeTypes[1].setValueFields(
+            new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"),
+            new JdbcTypeField(Types.INTEGER, "ORG_ID", Integer.class, "orgId"),
+            new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"));
+
+        storeTypes[2] = new JdbcType();
+        storeTypes[2].setDatabaseSchema("PUBLIC");
+        storeTypes[2].setDatabaseTable("PERSON_COMPLEX");
+        storeTypes[2].setKeyType("org.apache.ignite.cache.store.jdbc.model.PersonComplexKey");
+        storeTypes[2].setKeyFields(
+            new JdbcTypeField(Types.INTEGER, "ID", int.class, "id"),
+            new JdbcTypeField(Types.INTEGER, "ORG_ID", int.class, "orgId"),
+            new JdbcTypeField(Types.INTEGER, "CITY_ID", int.class, "cityId"));
+
+        storeTypes[2].setValueType("org.apache.ignite.cache.store.jdbc.model.Person");
+        storeTypes[2].setValueFields(
+            new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"),
+            new JdbcTypeField(Types.INTEGER, "ORG_ID", Integer.class, "orgId"),
+            new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"),
+            new JdbcTypeField(Types.INTEGER, "SALARY", Integer.class, "salary"));
+
+        storeTypes[3] = new JdbcType();
+        storeTypes[3].setDatabaseSchema("PUBLIC");
+        storeTypes[3].setDatabaseTable("TIMESTAMP_ENTRIES");
+        storeTypes[3].setKeyType("java.sql.Timestamp");
+        storeTypes[3].setKeyFields(new JdbcTypeField(Types.TIMESTAMP, "KEY", Timestamp.class, null));
+
+        storeTypes[3].setValueType("java.lang.Integer");
+        storeTypes[3].setValueFields(new JdbcTypeField(Types.INTEGER, "VAL", Integer.class, null));
+
+        storeTypes[4] = new JdbcType();
+        storeTypes[4].setDatabaseSchema("PUBLIC");
+        storeTypes[4].setDatabaseTable("STRING_ENTRIES");
+        storeTypes[4].setKeyType("java.lang.String");
+        storeTypes[4].setKeyFields(new JdbcTypeField(Types.VARCHAR, "KEY", String.class, null));
+
+        storeTypes[4].setValueType("java.lang.String");
+        storeTypes[4].setValueFields(new JdbcTypeField(Types.VARCHAR, "VAL", String.class, null));
+
+        storeTypes[5] = new JdbcType();
+        storeTypes[5].setDatabaseSchema("PUBLIC");
+        storeTypes[5].setDatabaseTable("UUID_ENTRIES");
+        storeTypes[5].setKeyType("java.util.UUID");
+        storeTypes[5].setKeyFields(new JdbcTypeField(Types.BINARY, "KEY", UUID.class, null));
+
+        storeTypes[5].setValueType("java.util.UUID");
+        storeTypes[5].setValueFields(new JdbcTypeField(Types.BINARY, "VAL", UUID.class, null));
+
+        storeFactory.setTypes(storeTypes);
+
+        storeFactory.setDialect(new H2Dialect());
+
+        CacheJdbcPojoStore<Object, Object> store = storeFactory.create();
+
+        // H2 DataSource
         store.setDataSource(JdbcConnectionPool.create(DFLT_CONN_URL, "sa", ""));
 
-        URL cfgUrl;
-
-        try {
-            cfgUrl = new URL(DFLT_MAPPING_CONFIG);
-        }
-        catch (MalformedURLException ignore) {
-            cfgUrl = U.resolveIgniteUrl(DFLT_MAPPING_CONFIG);
-        }
-
-        if (cfgUrl == null)
-            throw new IgniteException("Failed to resolve metadata path: " + DFLT_MAPPING_CONFIG);
-
-        try {
-            GenericApplicationContext springCtx = new GenericApplicationContext();
-
-            new XmlBeanDefinitionReader(springCtx).loadBeanDefinitions(new UrlResource(cfgUrl));
-
-            springCtx.refresh();
-
-            Collection<CacheTypeMetadata> typeMeta = springCtx.getBeansOfType(CacheTypeMetadata.class).values();
-
-            Map<Integer, Map<Object, CacheAbstractJdbcStore.EntryMapping>> cacheMappings = new HashMap<>();
-
-            JdbcDialect dialect = store.resolveDialect();
-
-            GridTestUtils.setFieldValue(store, CacheAbstractJdbcStore.class, "dialect", dialect);
-
-            Map<Object, CacheAbstractJdbcStore.EntryMapping> entryMappings = U.newHashMap(typeMeta.size());
-
-            for (CacheTypeMetadata type : typeMeta)
-                entryMappings.put(store.keyTypeId(type.getKeyType()),
-                    new CacheAbstractJdbcStore.EntryMapping(null, dialect, type));
-
-            store.prepareBuilders(null, typeMeta);
-
-            cacheMappings.put(null, entryMappings);
-
-            GridTestUtils.setFieldValue(store, CacheAbstractJdbcStore.class, "cacheMappings", cacheMappings);
-        }
-        catch (BeansException e) {
-            if (X.hasCause(e, ClassNotFoundException.class))
-                throw new IgniteException("Failed to instantiate Spring XML application context " +
-                    "(make sure all classes used in Spring configuration are present at CLASSPATH) " +
-                    "[springUrl=" + cfgUrl + ']', e);
-            else
-                throw new IgniteException("Failed to instantiate Spring XML application context [springUrl=" +
-                    cfgUrl + ", err=" + e.getMessage() + ']', e);
-        }
-
         return store;
     }
 
@@ -224,7 +227,6 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
         super.beforeTest();
     }
 
-
     /**
      * @throws Exception If failed.
      */
@@ -274,7 +276,7 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
             if (i > 0)
                 prnComplexStmt.setInt(5, 1000 + i * 500);
             else // Add person with null salary
-                prnComplexStmt.setNull(5, java.sql.Types.INTEGER);
+                prnComplexStmt.setNull(5, Types.INTEGER);
 
             prnComplexStmt.addBatch();
         }
@@ -302,9 +304,9 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
 
                     Person val = (Person)v;
 
-                    assert key.getId() == val.getId();
-                    assert key.getOrgId() == val.getOrgId();
-                    assertEquals("name"  + key.getId(), val.getName());
+                    assertTrue("Key ID should be the same as value ID", key.getId() == val.getId());
+                    assertTrue("Key orgID should be the same as value orgID", key.getOrgId() == val.getOrgId());
+                    assertEquals("name" + key.getId(), val.getName());
 
                     prnComplexKeys.add((PersonComplexKey)k);
                 }
@@ -351,25 +353,23 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
      * @throws Exception If failed.
      */
     public void testWriteRetry() throws Exception {
+        CacheJdbcPojoStore<Object, Object> store = store();
+
         // Special dialect that will skip updates, to test write retry.
-        BasicJdbcDialect dialect = new BasicJdbcDialect() {
+        store.setDialect(new H2Dialect() {
             /** {@inheritDoc} */
-            @Override public String updateQuery(String tblName, Collection<String> keyCols, Iterable<String> valCols) {
-                return super.updateQuery(tblName, keyCols, valCols) + " AND 1 = 0";
+            @Override public boolean hasMerge() {
+                return false;
             }
-        };
-
-        store.setDialect(dialect);
-
-        Map<String, Map<Object, CacheAbstractJdbcStore.EntryMapping>> cacheMappings =
-            GridTestUtils.getFieldValue(store, CacheAbstractJdbcStore.class, "cacheMappings");
-
-        CacheAbstractJdbcStore.EntryMapping em = cacheMappings.get(null).get(OrganizationKey.class);
 
-        CacheTypeMetadata typeMeta = GridTestUtils.getFieldValue(em, CacheAbstractJdbcStore.EntryMapping.class, "typeMeta");
+            /** {@inheritDoc} */
+            @Override public String updateQuery(String tblName, Collection<String> keyCols,
+                Iterable<String> valCols) {
+                return super.updateQuery(tblName, keyCols, valCols) + " AND 1 = 0";
+            }
+        });
 
-        cacheMappings.get(null).put(OrganizationKey.class,
-            new CacheAbstractJdbcStore.EntryMapping(null, dialect, typeMeta));
+        inject(store);
 
         Connection conn = store.openConnection(false);
 
@@ -392,6 +392,8 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
 
         try {
             store.write(new CacheEntryImpl<>(k1, v1));
+
+            fail("CacheWriterException wasn't thrown.");
         }
         catch (CacheWriterException e) {
             if (!e.getMessage().startsWith("Failed insert entry in database, violate a unique index or primary key") ||
@@ -418,4 +420,4 @@ public class CacheJdbcPojoStoreTest extends GridAbstractCacheStoreSelfTest<Cache
 
         assertNull(store.load(k));
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcStoreAbstractMultithreadedSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcStoreAbstractMultithreadedSelfTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcStoreAbstractMultithreadedSelfTest.java
index 757cedd..4c4bd58 100644
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcStoreAbstractMultithreadedSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/jdbc/CacheJdbcStoreAbstractMultithreadedSelfTest.java
@@ -308,4 +308,4 @@ public abstract class CacheJdbcStoreAbstractMultithreadedSelfTest<T extends Cach
             }
         }, 8, "tx");
     }
-}
\ No newline at end of file
+}


[03/25] ignite git commit: IGNITE-1753 Refactored usages of deprectaed CacheTypeMetadata to JdbcType.

Posted by ag...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
index f6432a7..8af9443 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
@@ -30,7 +30,9 @@ import org.apache.ignite.cache.store.GridCacheBalancingStoreSelfTest;
 import org.apache.ignite.cache.store.GridCacheLoadOnlyStoreAdapterSelfTest;
 import org.apache.ignite.cache.store.StoreResourceInjectionSelfTest;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreMultitreadedSelfTest;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreOptimizedMarshallerSelfTest;
 import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreTest;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStorePortableMarshallerSelfTest;
 import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreMultithreadedSelfTest;
 import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreSelfTest;
 import org.apache.ignite.internal.processors.cache.CacheAffinityCallSelfTest;
@@ -139,7 +141,7 @@ public class IgniteCacheTestSuite extends TestSuite {
     }
 
     /**
-     * @param ignoredTests
+     * @param ignoredTests Tests to ignore.
      * @return Test suite.
      * @throws Exception Thrown in case of the failure.
      */
@@ -211,6 +213,8 @@ public class IgniteCacheTestSuite extends TestSuite {
         suite.addTestSuite(GridCacheJdbcBlobStoreSelfTest.class);
         suite.addTestSuite(GridCacheJdbcBlobStoreMultithreadedSelfTest.class);
         suite.addTestSuite(CacheJdbcPojoStoreTest.class);
+        suite.addTestSuite(CacheJdbcPojoStoreOptimizedMarshallerSelfTest.class);
+        suite.addTestSuite(CacheJdbcPojoStorePortableMarshallerSelfTest.class);
         suite.addTestSuite(CacheJdbcPojoStoreMultitreadedSelfTest.class);
         suite.addTestSuite(GridCacheBalancingStoreSelfTest.class);
         suite.addTestSuite(GridCacheAffinityApiSelfTest.class);

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/README.txt
----------------------------------------------------------------------
diff --git a/modules/schema-import/README.txt b/modules/schema-import/README.txt
index 9b0a44d..d4f2dbf 100644
--- a/modules/schema-import/README.txt
+++ b/modules/schema-import/README.txt
@@ -17,19 +17,19 @@ Use Schema Import Utility for generation of type mapping and domain model in Jav
 
 For example you may use the following script for create sample type 'Person' in your RDBMS system:
 
-create table PERSON(id integer not null, firstName varchar(50), lastName varchar(50), PRIMARY KEY(id));
+create table PERSON(id integer not null PRIMARY KEY, first_name varchar(50), last_name varchar(50), salary double);
 
-insert into PERSON(id, first_name, last_name) values(1, 'Johannes', 'Kepler');
-insert into PERSON(id, first_name, last_name) values(2, 'Galileo', 'Galilei');
-insert into PERSON(id, first_name, last_name) values(3, 'Henry', 'More');
-insert into PERSON(id, first_name, last_name) values(4, 'Polish', 'Brethren');
-insert into PERSON(id, first_name, last_name) values(5, 'Robert', 'Boyle');
-insert into PERSON(id, first_name, last_name) values(6, 'Isaac', 'Newton');
+insert into PERSON(id, first_name, last_name, salary) values(1, 'Johannes', 'Kepler', 1000);
+insert into PERSON(id, first_name, last_name, salary) values(2, 'Galileo', 'Galilei', 1200);
+insert into PERSON(id, first_name, last_name, salary) values(3, 'Henry', 'More', 1150);
+insert into PERSON(id, first_name, last_name, salary) values(4, 'Polish', 'Brethren', 2000);
+insert into PERSON(id, first_name, last_name, salary) values(5, 'Robert', 'Boyle', 2500);
+insert into PERSON(id, first_name, last_name, salary) values(6, 'Isaac', 'Newton', 1300);
 
 The Ignite Schema Import utility generates the following artifacts:
- # Java POJO key and value classes
- # XML CacheTypeMetadata configuration
- # Java configuration snippet (alternative to XML)
+ # Java POJO key and value classes (enter "org.apache.ignite.schema" package name before generation).
+ # XML CacheTypeMetadata configuration.
+ # Java configuration snippet (alternative to XML).
 
 After you exit from the wizard, you should:
  # Copy generated POJO java classes to you project source folder.
@@ -42,6 +42,9 @@ After you exit from the wizard, you should:
 
 Example of spring configuration:
 
+<!-- Sample data source. -->
+<bean id="myDataSource" class="org.h2.jdbcx.JdbcDataSource"/>
+
 <bean class="org.apache.ignite.configuration.IgniteConfiguration">
     ...
     <!-- Cache configuration. -->
@@ -52,67 +55,63 @@ Example of spring configuration:
 
                 <!-- Cache store. -->
                 <property name="cacheStoreFactory">
-                    <bean class="javax.cache.configuration.FactoryBuilder$SingletonFactory">
-                        <constructor-arg>
-                            <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStore">
-                                <property name="dataSource">
-                                    <!-- TODO: Need specify connection pooling DataSource to your RDBMS system. -->
-                                    ...
-                                </property>
-                            </bean>
-                        </constructor-arg>
-                    </bean>
-                </property>
-
-                <!-- Type mapping description. -->
-                <property name="typeMetadata">
-                    <list>
-                        <bean class="org.apache.ignite.cache.CacheTypeMetadata">
-                            <property name="databaseTable" value="PERSON"/>
-                            <property name="keyType" value="org.apache.ignite.examples.datagrid.store.model.PersonKey"/>
-                            <property name="valueType" value="org.apache.ignite.examples.datagrid.store.Person"/>
-                            <property name="keyFields">
-                                <list>
-                                    <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                                        <property name="databaseName" value="ID"/>
-                                        <property name="databaseType">
-                                            <util:constant static-field="java.sql.Types.BIGINT"/>
-                                        </property>
-                                        <property name="javaName" value="id"/>
-                                        <property name="javaType" value="long"/>
-                                    </bean>
-                                </list>
-                            </property>
-                            <property name="valueFields">
+                    <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
+                        <property name="dataSourceBean" value="myDataSource"/>
+                        <bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStore">
+                            <property name="dataSourceBean" value="myDataSource" />
+                            <property name="types">
                                 <list>
-                                    <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                                        <property name="databaseName" value="ID"/>
-                                        <property name="databaseType">
-                                            <util:constant static-field="java.sql.Types.BIGINT"/>
+                                    <bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
+                                        <property name="cacheName" value="myCache" />
+                                        <property name="databaseSchema" value="MY_DB_SCHEMA" />
+                                        <property name="databaseTable" value="PERSON" />
+                                        <property name="keyType" value="java.lang.Integer" />
+                                        <property name="keyFields">
+                                            <list>
+                                                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                                    <property name="databaseFieldType" >
+                                                        <util:constant static-field="java.sql.Types.INTEGER"/>
+                                                    </property>
+                                                    <property name="databaseFieldName" value="ID" />
+                                                    <property name="javaFieldType" value="java.lang.Integer" />
+                                                    <property name="javaFieldName" value="id" />
+                                                </bean>
+                                            </list>
                                         </property>
-                                        <property name="javaName" value="id"/>
-                                        <property name="javaType" value="long"/>
-                                    </bean>
-                                    <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                                        <property name="databaseName" value="FIRST_NAME"/>
-                                        <property name="databaseType">
-                                            <util:constant static-field="java.sql.Types.VARCHAR"/>
+                                        <property name="valueType" value="my.company.Person" />
+                                        <property name="valueFields">
+                                            <list>
+                                                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                                    <property name="databaseFieldType" >
+                                                        <util:constant static-field="java.sql.Types.VARCHAR"/>
+                                                    </property>
+                                                    <property name="databaseFieldName" value="first_name" />
+                                                    <property name="javaFieldType" value="java.lang.String" />
+                                                    <property name="javaFieldName" value="firstName" />
+                                                </bean>
+                                                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                                    <property name="databaseFieldType" >
+                                                        <util:constant static-field="java.sql.Types.VARCHAR"/>
+                                                    </property>
+                                                    <property name="databaseFieldName" value="last_name" />
+                                                    <property name="javaFieldType" value="java.lang.String" />
+                                                    <property name="javaFieldName" value="lastName" />
+                                                </bean>
+                                                <bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
+                                                    <property name="databaseFieldType" >
+                                                        <util:constant static-field="java.sql.Types.DOUBLE"/>
+                                                    </property>
+                                                    <property name="databaseFieldName" value="salary" />
+                                                    <property name="javaFieldType" value="java.lang.Double" />
+                                                    <property name="javaFieldName" value="salary" />
+                                                </bean>
+                                            </list>
                                         </property>
-                                        <property name="javaName" value="firstName"/>
-                                        <property name="javaType" value="java.lang.String"/>
-                                    </bean>
-                                    <bean class="org.apache.ignite.cache.CacheTypeFieldMetadata">
-                                        <property name="databaseName" value="LAST_NAME"/>
-                                        <property name="databaseType">
-                                            <util:constant static-field="java.sql.Types.VARCHAR"/>
-                                        </property>
-                                        <property name="javaName" value="lastName"/>
-                                        <property name="javaType" value="java.lang.String"/>
                                     </bean>
                                 </list>
                             </property>
                         </bean>
-                    </list>
+                    </bean>
                 </property>
                 ...
             </bean>
@@ -127,44 +126,46 @@ IgniteConfiguration cfg = new IgniteConfiguration();
 ...
 CacheConfiguration ccfg = new CacheConfiguration<>();
 
-DataSource dataSource = null; // TODO: Need specify connection pooling DataSource to your RDBMS system.
-
-// Create store.
-CacheJdbcPojoStore store = new CacheJdbcPojoStore();
-store.setDataSource(dataSource);
-
 // Create store factory.
-ccfg.setCacheStoreFactory(new FactoryBuilder.SingletonFactory<>(store));
-
-// Configure cache to use store.
-ccfg.setReadThrough(true);
-ccfg.setWriteThrough(true);
+CacheJdbcPojoStoreFactory storeFactory = new CacheJdbcPojoStoreFactory();
+storeFactory.setDataSourceBean("myDataSource");
 
 // Configure cache types.
-Collection<CacheTypeMetadata> meta = new ArrayList<>();
+Collection<JdbcType> jdbcTypes = new ArrayList<>();
 
 // PERSON type mapping.
-CacheTypeMetadata tm = new CacheTypeMetadata();
+JdbcType jdbcType = new JdbcType();
+
+jdbcType.setCacheName(CACHE_NAME);
 
-tm.setDatabaseTable("PERSON");
+jdbcType.setDatabaseSchema("MY_DB_SCHEMA");
+jdbcType.setDatabaseTable("PERSON");
 
-tm.setKeyType("java.lang.Long");
-tm.setValueType("org.apache.ignite.examples.datagrid.store.Person");
+jdbcType.setKeyType("java.lang.Integer");
+jdbcType.setValueType("my.company.Person");
 
 // Key fields for PERSONS.
-tm.setKeyFields(F.asList(new CacheTypeFieldMetadata("ID", Types.BIGINT, "id", Long.class)));
+jdbcType.setKeyFields(F.asArray(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id")));
 
 // Value fields for PERSONS.
-tm.setValueFields(F.asList(
-    new CacheTypeFieldMetadata("ID", Types.BIGINT, "id", long.class),
-    new CacheTypeFieldMetadata("FIRST_NAME", Types.VARCHAR, "firstName", String.class),
-    new CacheTypeFieldMetadata("LAST_NAME", Types.VARCHAR, "lastName", String.class)
+jdbcType.setValueFields(F.asArray(
+    new JdbcTypeField(Types.INTEGER, "ID", int.class, "id"),
+    new JdbcTypeField(Types.VARCHAR, "first_name", String.class, "firstName"),
+    new JdbcTypeField(Types.VARCHAR, "last_name", String.class, "lastName"),
+    new JdbcTypeField(Types.DOUBLE, "salary", Double.class, "salary")
 ));
-...
-ccfg.setTypeMetadata(tm);
+
+jdbcTypes.add(jdbcType);
+
+storeFactory.setTypes(jdbcTypes.toArray(new JdbcType[jdbcTypes.size()]));
+
+// Configure cache to use store.
+ccfg.setReadThrough(true);
+ccfg.setWriteThrough(true);
+ccfg.setCacheStoreFactory(storeFactory);
 
 cfg.setCacheConfiguration(ccfg);
+
 ...
+
 // Start Ignite node.
 Ignition.start(cfg);
 
@@ -183,6 +184,7 @@ Performance optimization.
 
 1. Use DataSource with connection pool.
 2. Enable write-behind feature by default write-behind is disabled.
+   Note, write-behind should not be used with TRANSACTIONAL caches.
 
 Example of spring configuration:
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
index f1119cf..283ccc1 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/CodeGenerator.java
@@ -33,7 +33,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
-import org.apache.ignite.schema.model.IndexItem;
+
+import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.internal.util.typedef.F;
 import org.apache.ignite.schema.model.PojoDescriptor;
 import org.apache.ignite.schema.model.PojoField;
 import org.apache.ignite.schema.ui.ConfirmCallable;
@@ -53,11 +55,9 @@ public class CodeGenerator {
     private static final String TAB2 = TAB + TAB;
     /** */
     private static final String TAB3 = TAB + TAB + TAB;
-    /** */
-    private static final String TAB4 = TAB + TAB + TAB + TAB;
 
     /** Java key words. */
-    private static final Set<String> javaKeywords = new HashSet<>(Arrays.asList(
+    private static final Set<String> JAVA_KEYWORDS = new HashSet<>(Arrays.asList(
         "abstract",     "assert",        "boolean",      "break",           "byte",
         "case",         "catch",         "char",         "class",           "const",
         "continue",     "default",       "do",           "double",          "else",
@@ -71,6 +71,12 @@ public class CodeGenerator {
         "void",         "volatile",      "while"
     ));
 
+    /** java.lang.* */
+    private static final String JAVA_LANG_PKG = "java.lang.";
+
+    /** java.util.* */
+    private static final String JAVA_UTIL_PKG = "java.util.";
+
     /** Regexp to validate java identifier. */
     private static final Pattern VALID_JAVA_IDENTIFIER =
         Pattern.compile("\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*");
@@ -98,7 +104,7 @@ public class CodeGenerator {
             if (part.isEmpty())
                 throw new IllegalStateException(msg + " could not has empty parts!");
 
-            if (javaKeywords.contains(part))
+            if (JAVA_KEYWORDS.contains(part))
                 throw new IllegalStateException(msg + " could not contains reserved keyword:" +
                     " [type = " + type + ", identifier=" + identifier + ", keyword=" + part + "]");
 
@@ -160,16 +166,6 @@ public class CodeGenerator {
     }
 
     /**
-     * Add line to source code with four indents.
-     *
-     * @param src Source code.
-     * @param line Code line.
-     */
-    private static void add4(Collection<String> src, String line) {
-        src.add(TAB4 + line);
-    }
-
-    /**
      * @param str Source string.
      * @return String with first letters in upper case.
      */
@@ -278,11 +274,15 @@ public class CodeGenerator {
         boolean constructor, boolean includeKeys, ConfirmCallable askOverwrite) throws IOException {
         String type = key ? pojo.keyClassName() : pojo.valueClassName();
 
-        File out = new File(pkgFolder, type + ".java");
-
         checkValidJavaIdentifier(pkg, true, "Package", type);
+
         checkValidJavaIdentifier(type, false, "Type", type);
 
+        if (!pkgFolder.exists() && !pkgFolder.mkdirs())
+            throw new IOException("Failed to create folders for package: " + pkg);
+
+        File out = new File(pkgFolder, type + ".java");
+
         if (out.exists()) {
             MessageBox.Result choice = askOverwrite.confirm(out.getName());
 
@@ -529,9 +529,6 @@ public class CodeGenerator {
         boolean includeKeys, ConfirmCallable askOverwrite) throws IOException {
         File pkgFolder = new File(outFolder, pkg.replace('.', File.separatorChar));
 
-        if (!pkgFolder.exists() && !pkgFolder.mkdirs())
-            throw new IOException("Failed to create folders for package: " + pkg);
-
         generateCode(pojo, true, pkg, pkgFolder, constructor, false, askOverwrite);
 
         generateCode(pojo, false, pkg, pkgFolder, constructor, includeKeys, askOverwrite);
@@ -548,12 +545,13 @@ public class CodeGenerator {
         for (PojoField field : fields) {
             String javaTypeName = field.javaTypeName();
 
-            if (javaTypeName.startsWith("java.lang."))
-                javaTypeName = javaTypeName.substring(10);
+            if (javaTypeName.startsWith(JAVA_LANG_PKG))
+                javaTypeName = javaTypeName.substring(JAVA_LANG_PKG.length());
+            else if (javaTypeName.startsWith(JAVA_UTIL_PKG))
+                javaTypeName = javaTypeName.substring(JAVA_UTIL_PKG.length());
 
-            add2(src, owner + ".add(new CacheTypeFieldMetadata(\"" + field.dbName() + "\", " +
-                "Types." + field.dbTypeName() + ", \"" +
-                field.javaName() + "\", " + javaTypeName + ".class));");
+            add2(src, owner + ".add(new JdbcTypeField(Types." + field.dbTypeName() + ", \"" + field.dbName() + "\", " +
+                    javaTypeName + ".class, \"" + field.javaName() + "\"));");
         }
     }
 
@@ -562,26 +560,24 @@ public class CodeGenerator {
      *
      * @param src Source code lines.
      * @param fields List of fields to add.
-     * @param varName Variable name to generate.
-     * @param mtdName Method name to generate.
      * @param comment Commentary text.
      * @param first {@code true} if variable should be declared.
      * @return {@code false} if variable was declared.
      */
-    private static boolean addQueryFields(Collection<String> src, Collection<PojoField> fields, String varName,
-        String mtdName, String comment, boolean first) {
+    private static boolean addQueryFields(Collection<String> src, Collection<PojoField> fields, String comment,
+        boolean first) {
         if (fields.isEmpty())
             return first;
 
         add2(src, comment);
-        add2(src, (first ? "Map<String, Class<?>> " : "") + varName + " = new LinkedHashMap<>();");
+        add2(src, (first ? "LinkedHashMap<String, String> " : "") + "fields = new LinkedHashMap<>();");
         add0(src, "");
 
         for (PojoField field : fields)
-            add2(src, varName + ".put(\"" + field.javaName() + "\", " + javaTypeName(field) + ".class);");
+            add2(src, "fields.put(\"" + field.javaName() + "\", \"" + javaTypeName(field) + "\");");
 
         add0(src, "");
-        add2(src, "type." + mtdName + "(" + varName + ");");
+        add2(src, "qryEntity.setFields(fields);");
         add0(src, "");
 
         return false;
@@ -615,118 +611,146 @@ public class CodeGenerator {
 
         Collection<String> src = new ArrayList<>(256);
 
-        header(src, pkg, "org.apache.ignite.cache.*;org.apache.ignite.cache.store.*;" +
-                "org.apache.ignite.configuration.*;org.apache.ignite.lang.*;;" +
-                "javax.cache.configuration.*;java.sql.*;java.util.*",
+        header(src, pkg, "java.sql.*;java.util.*;" +
+            "org.apache.ignite.cache.*;org.apache.ignite.cache.store.jdbc.*;" +
+            "org.apache.ignite.configuration.*;" + pkg + ".*",
             "CacheConfig", "CacheConfig");
 
         add1(src, "/**");
         add1(src, "* Configure cache.");
         add1(src, "*");
-        add1(src, "* @param name Cache name.");
+        add1(src, "* @param cacheName Cache name.");
         add1(src, "* @param storeFactory Cache store factory.");
         add1(src, "*/");
-        add1(src, "public static <K, V> CacheConfiguration<K, V> cache(String name," +
-            " Factory<CacheStore<K, V>> storeFactory) {");
+        add1(src, "public static <K, V> CacheConfiguration<K, V> cache(String cacheName," +
+            " CacheJdbcPojoStoreFactory<K, V> storeFactory) {");
         add2(src, "if (storeFactory == null)");
         add3(src, " throw new IllegalArgumentException(\"Cache store factory cannot be null.\");");
         add0(src, "");
-        add2(src, "CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(name);");
+        add2(src, "CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(cacheName);");
         add0(src, "");
         add2(src, "ccfg.setCacheStoreFactory(storeFactory);");
         add2(src, "ccfg.setReadThrough(true);");
         add2(src, "ccfg.setWriteThrough(true);");
         add0(src, "");
 
-        add2(src, "// Configure cache types. ");
-        add2(src, "Collection<CacheTypeMetadata> meta = new ArrayList<>();");
+        add2(src, "// Configure JDBC types. ");
+        add2(src, "Collection<JdbcType> jdbcTypes = new ArrayList<>();");
         add0(src, "");
 
         boolean first = true;
-        boolean firstAsc = true;
-        boolean firstDesc = true;
-        boolean firstGrps = true;
-        boolean firstGrp = true;
 
         for (PojoDescriptor pojo : pojos) {
             String tbl = pojo.table();
 
             add2(src, "// " + tbl + ".");
-            add2(src, (first ? "CacheTypeMetadata " : "") + "type = new CacheTypeMetadata();");
-            add0(src, "");
-            add2(src, "meta.add(type);");
+            add2(src, (first ? "JdbcType " : "") + "jdbcType = new JdbcType();");
             add0(src, "");
 
+            add2(src, "jdbcType.setCacheName(cacheName);");
+
             // Database info.
-            add2(src, "type.setDatabaseSchema(\"" + pojo.schema() + "\");");
-            add2(src, "type.setDatabaseTable(\"" + tbl + "\");");
+            add2(src, "jdbcType.setDatabaseSchema(\"" + pojo.schema() + "\");");
+            add2(src, "jdbcType.setDatabaseTable(\"" + tbl + "\");");
 
             // Java info.
-            add2(src, "type.setKeyType(" + pojo.keyClassName() + ".class.getName());");
-            add2(src, "type.setValueType(" + pojo.valueClassName() + ".class.getName());");
+            add2(src, "jdbcType.setKeyType(" + pojo.keyClassName() + ".class.getName());");
+            add2(src, "jdbcType.setValueType(" + pojo.valueClassName() + ".class.getName());");
             add0(src, "");
 
             // Key fields.
             add2(src, "// Key fields for " + tbl + ".");
-            add2(src, (first ? "Collection<CacheTypeFieldMetadata> " : "") + "keys = new ArrayList<>();");
+            add2(src, (first ? "Collection<JdbcTypeField> " : "") + "keys = new ArrayList<>();");
             addFields(src, "keys", pojo.keyFields());
-            add2(src, "type.setKeyFields(keys);");
+            add2(src, "jdbcType.setKeyFields(keys.toArray(new JdbcTypeField[keys.size()]));");
             add0(src, "");
 
             // Value fields.
             add2(src, "// Value fields for " + tbl + ".");
-            add2(src, (first ? "Collection<CacheTypeFieldMetadata> " : "") + "vals = new ArrayList<>();");
+            add2(src, (first ? "Collection<JdbcTypeField> " : "") + "vals = new ArrayList<>();");
             addFields(src, "vals", pojo.valueFields(includeKeys));
-            add2(src, "type.setValueFields(vals);");
+            add2(src, "jdbcType.setValueFields(vals.toArray(new JdbcTypeField[vals.size()]));");
             add0(src, "");
 
-            // Query fields.
-            addQueryFields(src, pojo.fields(), "qryFlds", "setQueryFields", "// Query fields for " + tbl + ".", first);
+            add2(src, "jdbcTypes.add(jdbcType);");
+            add0(src, "");
 
-            // Ascending fields.
-            firstAsc = addQueryFields(src, pojo.ascendingFields(), "ascFlds", "setAscendingFields",
-                "// Ascending fields for " + tbl + ".", firstAsc);
+            first = false;
+        }
 
-            // Descending fields.
-            firstDesc = addQueryFields(src, pojo.descendingFields(), "descFlds", "setDescendingFields",
-                "// Descending fields for " + tbl + ".", firstDesc);
+        add2(src, "storeFactory.setTypes(jdbcTypes.toArray(new JdbcType[jdbcTypes.size()]));");
+        add0(src, "");
 
-            // Groups.
-            Map<String, Map<String, IndexItem>> groups = pojo.groups();
+        // Queries entities.
+        add2(src, "// Configure query entities. ");
+        add2(src, "Collection<QueryEntity> qryEntities = new ArrayList<>();");
+        add0(src, "");
 
-            if (!groups.isEmpty()) {
-                add2(src, "// Groups for " + tbl + ".");
-                add2(src, (firstGrps ? "Map<String, LinkedHashMap<String, IgniteBiTuple<Class<?>, Boolean>>> " : "") +
-                    "grps = new LinkedHashMap<>();");
-                add0(src, "");
+        first = true;
+        boolean firstIdxs = true;
+        boolean firstIdx = true;
 
-                firstGrps = false;
+        for (PojoDescriptor pojo : pojos) {
+            String tbl = pojo.table();
 
-                for (Map.Entry<String, Map<String, IndexItem>> group : groups.entrySet()) {
-                    add2(src, (firstGrp ? "LinkedHashMap<String, IgniteBiTuple<Class<?>, Boolean>> " : "") +
-                        "grpItems = new LinkedHashMap<>();");
-                    add0(src, "");
+            add2(src, (first ? "QueryEntity " : "") + "qryEntity = new QueryEntity();");
+            add0(src, "");
+            add2(src, "qryEntity.setKeyType(" + pojo.keyClassName() + ".class.getName());");
+            add2(src, "qryEntity.setValueType(" + pojo.valueClassName() + ".class.getName());");
+            add0(src, "");
+
+            // Query fields.
+            addQueryFields(src, pojo.fields(), "// Query fields for " + tbl + ".", first);
 
-                    for (Map.Entry<String, IndexItem> grpItem : group.getValue().entrySet()) {
-                        IndexItem idxCol = grpItem.getValue();
+            // Indexes.
+            Collection<QueryIndex> idxs = pojo.indexes();
 
-                        add2(src, "grpItems.put(\"" + grpItem.getKey() + "\", " +
-                            "new IgniteBiTuple<Class<?>, Boolean>(" + javaTypeName(idxCol.type()) + ".class, " +
-                            idxCol.descending() + "));");
+            if (!idxs.isEmpty()) {
+                add2(src, "// Indexes for " + tbl + ".");
+                add2(src, (firstIdxs ? "Collection<QueryIndex> " : "") + "idxs = new ArrayList<>();");
+                add0(src, "");
+
+                firstIdxs = false;
+
+                for (QueryIndex idx : idxs) {
+                    if (idx.getFields().size() == 1) {
+                        Map.Entry<String, Boolean> fld = F.first(idx.getFields().entrySet());
+
+                        add2(src, "idxs.add(new QueryIndex(\"" + fld.getKey() + "\", " + fld.getValue() + ", \"" +
+                            idx.getName()  + "\"));");
+                        add0(src, "");
                     }
+                    else {
+                        add2(src, (firstIdx ? "QueryIndex " : "") + "idx = new QueryIndex();");
+                        add0(src, "");
 
-                    add0(src, "");
-                    add2(src, "grps.put(\"" + group.getKey() + "\", grpItems);");
-                    add0(src, "");
+                        add2(src, "idx.setName(\"" + idx.getName() + "\");");
+                        add0(src, "");
 
-                    firstGrp = false;
+                        add2(src, (firstIdx ? "LinkedHashMap<String, Boolean> " : "") +
+                            "idxFlds = new LinkedHashMap<>();");
+                        add0(src, "");
+
+                        for (Map.Entry<String, Boolean> idxFld : idx.getFields().entrySet())
+                            add2(src, "idxFlds.put(\"" + idxFld.getKey()  + "\", " + idxFld.getValue() + ");");
+
+                        add0(src, "");
+
+                        add2(src, "idx.setFields(idxFlds);");
+                        add0(src, "");
+
+                        add2(src, "idxs.add(idx);");
+                        add0(src, "");
+
+                        firstIdx = false;
+                    }
                 }
 
-                add2(src, "type.setGroups(grps);");
+                add2(src, "qryEntity.setIndexes(idxs);");
                 add0(src, "");
             }
 
-            add2(src, "ccfg.setTypeMetadata(meta);");
+            add2(src, "ccfg.setQueryEntities(qryEntities);");
             add0(src, "");
 
             first = false;
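
For readers following the migration, here is a rough, hand-written sketch of the kind of configuration code the updated generator now emits for a single table. It is not actual generator output: the CacheConfigSketch/PersonKey/Person names, the PUBLIC schema, the PERSON table, its ID/NAME columns and the PERSON_NAME_IDX index are invented for illustration, while the APIs used (JdbcType, JdbcTypeField, QueryEntity, QueryIndex, CacheJdbcPojoStoreFactory) are the ones referenced in the diff above.

    import java.sql.Types;
    import java.util.Collections;
    import java.util.LinkedHashMap;

    import org.apache.ignite.cache.QueryEntity;
    import org.apache.ignite.cache.QueryIndex;
    import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
    import org.apache.ignite.cache.store.jdbc.JdbcType;
    import org.apache.ignite.cache.store.jdbc.JdbcTypeField;
    import org.apache.ignite.configuration.CacheConfiguration;

    public class CacheConfigSketch {
        /** Hypothetical generated key class. */
        public static class PersonKey { public Integer id; }

        /** Hypothetical generated value class. */
        public static class Person { public Integer id; public String name; }

        /** Builds a cache configuration backed by a JDBC POJO store. */
        public static <K, V> CacheConfiguration<K, V> cache(String cacheName,
            CacheJdbcPojoStoreFactory<K, V> storeFactory) {
            CacheConfiguration<K, V> ccfg = new CacheConfiguration<>(cacheName);

            ccfg.setCacheStoreFactory(storeFactory);
            ccfg.setReadThrough(true);
            ccfg.setWriteThrough(true);

            // JDBC mapping (replaces CacheTypeMetadata key/value fields).
            JdbcType jdbcType = new JdbcType();

            jdbcType.setCacheName(cacheName);
            jdbcType.setDatabaseSchema("PUBLIC");
            jdbcType.setDatabaseTable("PERSON");
            jdbcType.setKeyType(PersonKey.class.getName());
            jdbcType.setValueType(Person.class.getName());
            jdbcType.setKeyFields(new JdbcTypeField(Types.INTEGER, "ID", Integer.class, "id"));
            jdbcType.setValueFields(new JdbcTypeField(Types.VARCHAR, "NAME", String.class, "name"));

            storeFactory.setTypes(jdbcType);

            // SQL metadata (replaces query fields/groups on CacheTypeMetadata).
            QueryEntity qryEntity = new QueryEntity();

            qryEntity.setKeyType(PersonKey.class.getName());
            qryEntity.setValueType(Person.class.getName());

            LinkedHashMap<String, String> fields = new LinkedHashMap<>();

            fields.put("id", "java.lang.Integer");
            fields.put("name", "java.lang.String");

            qryEntity.setFields(fields);

            qryEntity.setIndexes(Collections.singleton(new QueryIndex("name", true, "PERSON_NAME_IDX")));

            ccfg.setQueryEntities(Collections.singleton(qryEntity));

            return ccfg;
        }
    }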

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/XmlGenerator.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/XmlGenerator.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/XmlGenerator.java
index bc9bab7..547e7b0 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/XmlGenerator.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/generator/XmlGenerator.java
@@ -35,10 +35,11 @@ import javax.xml.transform.TransformerException;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
-import org.apache.ignite.cache.CacheTypeFieldMetadata;
-import org.apache.ignite.cache.CacheTypeMetadata;
-import org.apache.ignite.lang.IgniteBiTuple;
-import org.apache.ignite.schema.model.IndexItem;
+import org.apache.ignite.cache.QueryEntity;
+import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory;
+import org.apache.ignite.cache.store.jdbc.JdbcType;
+import org.apache.ignite.cache.store.jdbc.JdbcTypeField;
 import org.apache.ignite.schema.model.PojoDescriptor;
 import org.apache.ignite.schema.model.PojoField;
 import org.apache.ignite.schema.ui.ConfirmCallable;
@@ -166,20 +167,20 @@ public class XmlGenerator {
      * @param name Property name.
      * @param fields Collection of POJO fields.
      */
-    private static void addFields(Document doc, Node parent, String name, Collection<PojoField> fields) {
+    private static void addJdbcFields(Document doc, Node parent, String name, Collection<PojoField> fields) {
         if (!fields.isEmpty()) {
             Element prop = addProperty(doc, parent, name, null);
 
             Element list = addElement(doc, prop, "list");
 
             for (PojoField field : fields) {
-                Element item = addBean(doc, list, CacheTypeFieldMetadata.class);
+                Element item = addBean(doc, list, JdbcTypeField.class);
 
-                addProperty(doc, item, "databaseName", field.dbName());
-                Element dbType = addProperty(doc, item, "databaseType", null);
+                Element dbType = addProperty(doc, item, "databaseFieldType", null);
                 addElement(doc, dbType, "util:constant", "static-field", "java.sql.Types." + field.dbTypeName());
-                addProperty(doc, item, "javaName", field.javaName());
-                addProperty(doc, item, "javaType", field.javaTypeName());
+                addProperty(doc, item, "databaseFieldName", field.dbName());
+                addProperty(doc, item, "javaFieldType", field.javaTypeName());
+                addProperty(doc, item, "javaFieldName", field.javaName());
             }
         }
     }
@@ -189,14 +190,13 @@ public class XmlGenerator {
      *
      * @param doc XML document.
      * @param parent Parent XML node.
-     * @param name Property name.
      * @param fields Map with fields.
      */
-    private static void addQueryFields(Document doc, Node parent, String name, Collection<PojoField> fields) {
+    private static void addQueryFields(Document doc, Node parent, Collection<PojoField> fields) {
         if (!fields.isEmpty()) {
-            Element prop = addProperty(doc, parent, name, null);
+            Element prop = addProperty(doc, parent, "fields", null);
 
-            Element map = addElement(doc, prop, "map");
+            Element map = addElement(doc, prop, "util:map", "map-class", "java.util.LinkedHashMap");
 
             for (PojoField field : fields)
                 addElement(doc, map, "entry", "key", field.javaName(), "value", field.javaTypeName());
@@ -208,47 +208,45 @@ public class XmlGenerator {
      *
      * @param doc XML document.
      * @param parent Parent XML node.
-     * @param groups Map with indexes.
+     * @param idxs Indexes.
      */
-    private static void addQueryGroups(Document doc, Node parent,
-        Map<String, Map<String, IndexItem>> groups) {
-        if (!groups.isEmpty()) {
-            Element prop = addProperty(doc, parent, "groups", null);
+    private static void addQueryIndexes(Document doc, Node parent, Collection<QueryIndex> idxs) {
+        if (!idxs.isEmpty()) {
+            Element prop = addProperty(doc, parent, "indexes", null);
 
-            Element map = addElement(doc, prop, "map");
+            Element list = addElement(doc, prop, "list");
 
-            for (Map.Entry<String, Map<String, IndexItem>> group : groups.entrySet()) {
-                Element entry1 = addElement(doc, map, "entry", "key", group.getKey());
+            for (QueryIndex idx : idxs) {
+                Element idxBean = addBean(doc, list, QueryIndex.class);
 
-                Element val1 = addElement(doc, entry1, "map");
+                addProperty(doc, idxBean, "name", idx.getName());
 
-                Map<String, IndexItem> grpItems = group.getValue();
+                Element idxType = addProperty(doc, idxBean, "indexType", null);
+                addElement(doc, idxType, "util:constant", "static-field", "org.apache.ignite.cache.QueryIndexType." + idx.getIndexType());
 
-                for (Map.Entry<String, IndexItem> grpItem : grpItems.entrySet()) {
-                    Element entry2 = addElement(doc, val1, "entry", "key", grpItem.getKey());
+                Element flds = addProperty(doc, idxBean, "fields", null);
 
-                    Element val2 = addBean(doc, entry2, IgniteBiTuple.class);
+                Element fldsMap = addElement(doc, flds, "map");
 
-                    IndexItem idxCol = grpItem.getValue();
+                Map<String, Boolean> idxFlds = idx.getFields();
 
-                    addElement(doc, val2, "constructor-arg", null, null, "value", idxCol.type());
-                    addElement(doc, val2, "constructor-arg", null, null, "value", String.valueOf(idxCol.descending()));
-                }
+                for (Map.Entry<String, Boolean> fld : idxFlds.entrySet())
+                    addElement(doc, fldsMap, "entry", "key", fld.getKey(), "value", fld.getValue().toString());
             }
         }
     }
 
     /**
-     * Add element with type metadata to XML document.
+     * Add element with JDBC POJO store factory to XML document.
      *
      * @param doc XML document.
      * @param parent Parent XML node.
     * @param pkg Package of types.
      * @param pojo POJO descriptor.
      */
-    private static void addTypeMetadata(Document doc, Node parent, String pkg, PojoDescriptor pojo,
+    private static void addJdbcPojoStoreFactory(Document doc, Node parent, String pkg, PojoDescriptor pojo,
         boolean includeKeys) {
-        Element bean = addBean(doc, parent, CacheTypeMetadata.class);
+        Element bean = addBean(doc, parent, JdbcType.class);
 
         addProperty(doc, bean, "databaseSchema", pojo.schema());
 
@@ -258,17 +256,29 @@ public class XmlGenerator {
 
         addProperty(doc, bean, "valueType", pkg + "." + pojo.valueClassName());
 
-        addFields(doc, bean, "keyFields", pojo.keyFields());
+        addJdbcFields(doc, bean, "keyFields", pojo.keyFields());
 
-        addFields(doc, bean, "valueFields", pojo.valueFields(includeKeys));
+        addJdbcFields(doc, bean, "valueFields", pojo.valueFields(includeKeys));
+    }
 
-        addQueryFields(doc, bean, "queryFields", pojo.fields());
+    /**
+     * Add element with query entity to XML document.
+     *
+     * @param doc XML document.
+     * @param parent Parent XML node.
+     * @param pkg Package of types.
+     * @param pojo POJO descriptor.
+     */
+    private static void addQueryEntity(Document doc, Node parent, String pkg, PojoDescriptor pojo) {
+        Element bean = addBean(doc, parent, QueryEntity.class);
+
+        addProperty(doc, bean, "keyType", pkg + "." + pojo.keyClassName());
 
-        addQueryFields(doc, bean, "ascendingFields", pojo.ascendingFields());
+        addProperty(doc, bean, "valueType", pkg + "." + pojo.valueClassName());
 
-        addQueryFields(doc, bean, "descendingFields", pojo.descendingFields());
+        addQueryFields(doc, bean, pojo.fields());
 
-        addQueryGroups(doc, bean, pojo.groups());
+        addQueryIndexes(doc, bean, pojo.indexes());
     }
 
     /**
@@ -333,8 +343,15 @@ public class XmlGenerator {
                 "http://www.springframework.org/schema/util " +
                 "http://www.springframework.org/schema/util/spring-util.xsd");
 
+            Element factoryBean = addBean(doc, beans, CacheJdbcPojoStoreFactory.class);
+            Element typesElem = addProperty(doc, factoryBean, "types", null);
+            Element typesItemsElem = addElement(doc, typesElem, "list");
+
+            for (PojoDescriptor pojo : pojos)
+                addJdbcPojoStoreFactory(doc, typesItemsElem, pkg, pojo, includeKeys);
+
             for (PojoDescriptor pojo : pojos)
-                addTypeMetadata(doc, beans, pkg, pojo, includeKeys);
+                addQueryEntity(doc, beans, pkg, pojo);
 
             TransformerFactory transformerFactory = TransformerFactory.newInstance();
 
@@ -361,4 +378,4 @@ public class XmlGenerator {
             throw new IllegalStateException(e);
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/model/IndexItem.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/IndexItem.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/IndexItem.java
deleted file mode 100644
index d51ec8f..0000000
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/IndexItem.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.schema.model;
-
-/**
- * Index item descriptor.
- */
-public class IndexItem {
-    /** Column type. */
-    private final String type;
-
-    /** Sort direction. */
-    private final Boolean desc;
-
-    /**
-     * Constructor.
-     *
-     * @param type Column type.
-     * @param desc Sort direction.
-     */
-    public IndexItem(String type, Boolean desc) {
-        this.type = type;
-        this.desc = desc;
-    }
-
-    /**
-     * @return Column type.
-     */
-    public String type() {
-        return type;
-    }
-
-    /**
-     * @return Sort direction.
-     */
-    public Boolean descending() {
-        return desc;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java
index d84643b..5053b07 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/PojoDescriptor.java
@@ -21,12 +21,8 @@ import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
 import javafx.beans.property.BooleanProperty;
 import javafx.beans.property.SimpleBooleanProperty;
 import javafx.beans.property.SimpleStringProperty;
@@ -35,6 +31,7 @@ import javafx.beans.value.ChangeListener;
 import javafx.beans.value.ObservableValue;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
+import org.apache.ignite.cache.QueryIndex;
 import org.apache.ignite.schema.parser.DbColumn;
 import org.apache.ignite.schema.parser.DbTable;
 
@@ -99,9 +96,6 @@ public class PojoDescriptor {
     /** Java class fields. */
     private final ObservableList<PojoField> fields;
 
-    /** Fields map for quick access. */
-    private final Map<String, PojoField> fieldsMap;
-
     /**
      * Constructor of POJO descriptor.
      *
@@ -125,8 +119,6 @@ public class PojoDescriptor {
 
         List<PojoField> flds = new ArrayList<>(cols.size());
 
-        fieldsMap = new HashMap<>(cols.size());
-
         for (DbColumn col : cols) {
             String colName = col.name();
 
@@ -137,8 +129,6 @@ public class PojoDescriptor {
             fld.owner(this);
 
             flds.add(fld);
-
-            fieldsMap.put(colName, fld);
         }
 
         fields = FXCollections.observableList(flds);
@@ -308,64 +298,12 @@ public class PojoDescriptor {
     }
 
     /**
-     * @return Ascending fields.
-     */
-    public Collection<PojoField> ascendingFields() {
-        Collection<PojoField> res = new ArrayList<>();
-
-        Set<String> asc = tbl.ascendingColumns();
-
-        for (PojoField field : fields)
-            if (field.use() && asc.contains(field.dbName()))
-                res.add(field);
-
-        return res;
-    }
-
-    /**
-     * @return Descending fields.
-     */
-    public Collection<PojoField> descendingFields() {
-        Collection<PojoField> res = new ArrayList<>();
-
-        Set<String> desc = tbl.descendingColumns();
-
-        for (PojoField field : fields)
-            if (field.use() && desc.contains(field.dbName()))
-                res.add(field);
-
-        return res;
-    }
-
-    /**
-     * Gets indexes groups.
+     * Gets indexes.
      *
-     * @return Map with indexes.
+     * @return Collection with indexes.
      */
-    public Map<String, Map<String, IndexItem>> groups() {
-        Map<String, Map<String, Boolean>> idxs = tbl.indexes();
-
-        Map<String, Map<String, IndexItem>> groups = new LinkedHashMap<>(idxs.size());
-
-        for (Map.Entry<String, Map<String, Boolean>> idx : idxs.entrySet()) {
-            Map<String, Boolean> idxCols = idx.getValue();
-
-            if (idxCols.size() > 1) {
-                String idxName = idx.getKey();
-
-                Map<String, IndexItem> grp = new LinkedHashMap<>();
-
-                groups.put(idxName, grp);
-
-                for (Map.Entry<String, Boolean> idxCol : idxCols.entrySet()) {
-                    PojoField fld = fieldsMap.get(idxCol.getKey());
-
-                    grp.put(fld.javaName(), new IndexItem(fld.javaTypeName(), idxCol.getValue()));
-                }
-            }
-        }
-
-        return groups;
+    public Collection<QueryIndex> indexes() {
+        return tbl.indexes();
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/model/SchemaDescriptor.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/SchemaDescriptor.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/SchemaDescriptor.java
index 1dc66f2..7de2247 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/model/SchemaDescriptor.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/model/SchemaDescriptor.java
@@ -55,8 +55,8 @@ public class SchemaDescriptor {
         return selected;
     }
 
-    @Override
-    public String toString() {
+    /** {@inheritDoc} */
+    @Override public String toString() {
         return schema;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DatabaseMetadataParser.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DatabaseMetadataParser.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DatabaseMetadataParser.java
index 3ecee53..b68f90d 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DatabaseMetadataParser.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DatabaseMetadataParser.java
@@ -30,6 +30,7 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
+import org.apache.ignite.cache.QueryIndex;
 import org.apache.ignite.schema.model.PojoDescriptor;
 import org.apache.ignite.schema.model.SchemaDescriptor;
 import org.apache.ignite.schema.parser.dialect.DB2MetadataDialect;
@@ -59,7 +60,7 @@ public class DatabaseMetadataParser {
                 return new OracleMetadataDialect();
             else if (dbProductName.startsWith("DB2/"))
                 return new DB2MetadataDialect();
-            else if (dbProductName.equals("MySQL"))
+            else if ("MySQL".equals(dbProductName))
                 return new MySQLMetadataDialect();
             else
                 return new JdbcMetadataDialect();
@@ -76,7 +77,7 @@ public class DatabaseMetadataParser {
      *
      * @param conn Connection to database.
      * @return List of schema descriptors.
-     * @throws SQLException If shemas loading failed.
+     * @throws SQLException If schemas loading failed.
      */
     public static ObservableList<SchemaDescriptor> schemas(Connection conn) throws SQLException  {
         List<String> dbSchemas = dialect(conn).schemas(conn);
@@ -93,7 +94,7 @@ public class DatabaseMetadataParser {
      * Parse database metadata.
      *
      * @param conn Connection to database.
-     * @param schemas Collention of schema names to load.
+     * @param schemas Collection of schema names to load.
      * @param tblsOnly If {@code true} then process tables only else process tables and views.
      * @return Collection of POJO descriptors.
      * @throws SQLException If parsing failed.
@@ -114,8 +115,7 @@ public class DatabaseMetadataParser {
 
             if (parent == null) {
                 parent = new PojoDescriptor(null, new DbTable(schema, "", Collections.<DbColumn>emptyList(),
-                    Collections.<String>emptySet(), Collections.<String>emptySet(),
-                    Collections.<String, Map<String, Boolean>>emptyMap()));
+                    Collections.<QueryIndex>emptyList()));
 
                 children = new ArrayList<>();
 
@@ -148,4 +148,4 @@ public class DatabaseMetadataParser {
 
         return FXCollections.observableList(res);
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DbTable.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DbTable.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DbTable.java
index c54bfd8..39f6f7b 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DbTable.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/DbTable.java
@@ -17,9 +17,9 @@
 
 package org.apache.ignite.schema.parser;
 
+import org.apache.ignite.cache.QueryIndex;
+
 import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
 
 /**
  * Database table.
@@ -34,14 +34,8 @@ public class DbTable {
     /** Columns. */
     private final Collection<DbColumn> cols;
 
-    /** Columns in ascending order. */
-    private final Set<String> ascCols;
-
-    /** Columns in descending order. */
-    private final Set<String> descCols;
-
     /** Indexes. */
-    private final Map<String, Map<String, Boolean>> idxs;
+    private final Collection<QueryIndex> idxs;
 
     /**
      * Default columns.
@@ -49,17 +43,12 @@ public class DbTable {
      * @param schema Schema name.
      * @param tbl Table name.
      * @param cols Columns.
-     * @param ascCols Columns in ascending order.
-     * @param descCols Columns in descending order.
      * @param idxs Indexes;
      */
-    public DbTable(String schema, String tbl, Collection<DbColumn> cols, Set<String> ascCols, Set<String> descCols,
-        Map<String, Map<String, Boolean>> idxs) {
+    public DbTable(String schema, String tbl, Collection<DbColumn> cols, Collection<QueryIndex> idxs) {
         this.schema = schema;
         this.tbl = tbl;
         this.cols = cols;
-        this.ascCols = ascCols;
-        this.descCols = descCols;
         this.idxs = idxs;
     }
 
@@ -85,23 +74,9 @@ public class DbTable {
     }
 
     /**
-     * @return Fields in ascending order
-     */
-    public Set<String> ascendingColumns() {
-        return ascCols;
-    }
-
-    /**
-     * @return Fields in descending order
-     */
-    public Set<String> descendingColumns() {
-        return descCols;
-    }
-
-    /**
      * @return Indexes.
      */
-    public Map<String, Map<String, Boolean>> indexes() {
+    public Collection<QueryIndex> indexes() {
         return idxs;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java
index 5d3a0fd..bdf309b 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/DatabaseMetadataDialect.java
@@ -21,10 +21,10 @@ import java.sql.Connection;
 import java.sql.SQLException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
+
+import org.apache.ignite.cache.QueryIndex;
 import org.apache.ignite.schema.parser.DbColumn;
 import org.apache.ignite.schema.parser.DbTable;
 
@@ -45,7 +45,7 @@ public abstract class DatabaseMetadataDialect {
      * Gets tables from database.
      *
      * @param conn Database connection.
-     * @param schemas Collention of schema names to load.
+     * @param schemas Collection of schema names to load.
      * @param tblsOnly If {@code true} then gets only tables otherwise gets tables and views.
      * @return Collection of table descriptors.
      * @throws SQLException If failed to get tables.
@@ -69,27 +69,7 @@ public abstract class DatabaseMetadataDialect {
      * @param idxs Table indexes.
      * @return New {@code DbTable} instance.
      */
-    protected DbTable table(String schema, String tbl, Collection<DbColumn> cols, Map<String, Map<String, Boolean>>idxs) {
-        Set<String> ascCols = new HashSet<>();
-
-        Set<String> descCols = new HashSet<>();
-
-        for (Map<String, Boolean> idx : idxs.values()) {
-            if (idx.size() == 1)
-                for (Map.Entry<String, Boolean> idxCol : idx.entrySet()) {
-                    String colName = idxCol.getKey();
-
-                    Boolean desc = idxCol.getValue();
-
-                    if (desc != null) {
-                        if (desc)
-                            descCols.add(colName);
-                        else
-                            ascCols.add(colName);
-                    }
-                }
-        }
-
-        return new DbTable(schema, tbl, cols, ascCols, descCols, idxs);
+    protected DbTable table(String schema, String tbl, Collection<DbColumn> cols, Collection<QueryIndex>idxs) {
+        return new DbTable(schema, tbl, cols, idxs);
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java
index 6f41195..45f0f9f 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/JdbcMetadataDialect.java
@@ -28,6 +28,9 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
+import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.cache.QueryIndexType;
 import org.apache.ignite.schema.parser.DbColumn;
 import org.apache.ignite.schema.parser.DbTable;
 
@@ -115,7 +118,7 @@ public class JdbcMetadataDialect extends DatabaseMetadataDialect {
 
         Collection<DbTable> tbls = new ArrayList<>();
 
-        if (schemas.size() == 0)
+        if (schemas.isEmpty())
             schemas.add(null);
 
         for (String toSchema: schemas) {
@@ -154,7 +157,7 @@ public class JdbcMetadataDialect extends DatabaseMetadataDialect {
                         }
                     }
 
-                    Map<String, Map<String, Boolean>> idxs = new LinkedHashMap<>();
+                    Map<String, QueryIndex> idxs = new LinkedHashMap<>();
 
                     try (ResultSet idxRs = dbMeta.getIndexInfo(tblCatalog, tblSchema, tblName, false, true)) {
                         while (idxRs.next()) {
@@ -165,27 +168,30 @@ public class JdbcMetadataDialect extends DatabaseMetadataDialect {
                             if (idxName == null || colName == null)
                                 continue;
 
-                            Map<String, Boolean> idx = idxs.get(idxName);
+                            QueryIndex idx = idxs.get(idxName);
 
                             if (idx == null) {
-                                idx = new LinkedHashMap<>();
+                                idx = new QueryIndex();
+                                idx.setName(idxName);
+                                idx.setIndexType(QueryIndexType.SORTED);
+                                idx.setFields(new LinkedHashMap<String, Boolean>());
 
                                 idxs.put(idxName, idx);
                             }
 
                             String askOrDesc = idxRs.getString(IDX_ASC_OR_DESC_IDX);
 
-                            Boolean desc = askOrDesc != null ? "D".equals(askOrDesc) : null;
+                            Boolean asc = askOrDesc == null || "A".equals(askOrDesc);
 
-                            idx.put(colName, desc);
+                            idx.getFields().put(colName, asc);
                         }
                     }
 
-                    tbls.add(table(schema, tblName, cols, idxs));
+                    tbls.add(table(schema, tblName, cols, idxs.values()));
                 }
             }
         }
 
         return tbls;
     }
-}
\ No newline at end of file
+}
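
Pulled out of the dialect for readability, the index-loading logic above amounts to the following standalone sketch. Note the simplifying assumptions: the IndexMetadataSketch class name is invented, the catalog is passed as null, and the getIndexInfo() columns are read by the standard JDBC labels (INDEX_NAME, COLUMN_NAME, ASC_OR_DESC), whereas the dialect itself reads them by positional index via its *_IDX constants.

    import java.sql.Connection;
    import java.sql.DatabaseMetaData;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.Collection;
    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.ignite.cache.QueryIndex;
    import org.apache.ignite.cache.QueryIndexType;

    public class IndexMetadataSketch {
        /** Collects table indexes as QueryIndex instances, one per index name. */
        static Collection<QueryIndex> indexes(Connection conn, String schema, String tbl) throws SQLException {
            Map<String, QueryIndex> idxs = new LinkedHashMap<>();

            DatabaseMetaData dbMeta = conn.getMetaData();

            try (ResultSet idxRs = dbMeta.getIndexInfo(null, schema, tbl, false, true)) {
                while (idxRs.next()) {
                    String idxName = idxRs.getString("INDEX_NAME");
                    String colName = idxRs.getString("COLUMN_NAME");

                    if (idxName == null || colName == null)
                        continue;

                    QueryIndex idx = idxs.get(idxName);

                    if (idx == null) {
                        idx = new QueryIndex();
                        idx.setName(idxName);
                        idx.setIndexType(QueryIndexType.SORTED);
                        idx.setFields(new LinkedHashMap<String, Boolean>());

                        idxs.put(idxName, idx);
                    }

                    // "A" means ascending, "D" descending; a null value is treated as ascending.
                    String ascOrDesc = idxRs.getString("ASC_OR_DESC");

                    idx.getFields().put(colName, ascOrDesc == null || "A".equals(ascOrDesc));
                }
            }

            return idxs.values();
        }
    }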

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java
index cf7e979..0d4e8d6 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/parser/dialect/OracleMetadataDialect.java
@@ -31,6 +31,9 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
+import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.cache.QueryIndexType;
 import org.apache.ignite.schema.parser.DbColumn;
 import org.apache.ignite.schema.parser.DbTable;
 
@@ -252,12 +255,12 @@ public class OracleMetadataDialect extends DatabaseMetadataDialect {
      * @param stmt Prepared SQL statement to execute.
      * @param owner DB owner.
      * @param tbl Table name.
-     * @return Index columns.
+     * @return Indexes.
      * @throws SQLException If failed to retrieve indexes columns.
      */
-    private Map<String, Map<String, Boolean>> indexes(PreparedStatement stmt, String owner, String tbl)
+    private Collection<QueryIndex> indexes(PreparedStatement stmt, String owner, String tbl)
         throws SQLException {
-        Map<String, Map<String, Boolean>> idxs = new LinkedHashMap<>();
+        Map<String, QueryIndex> idxs = new LinkedHashMap<>();
 
         stmt.setString(1, owner);
         stmt.setString(2, tbl);
@@ -266,10 +269,13 @@ public class OracleMetadataDialect extends DatabaseMetadataDialect {
             while (idxsRs.next()) {
                 String idxName = idxsRs.getString(IDX_NAME_IDX);
 
-                Map<String, Boolean> idx = idxs.get(idxName);
+                QueryIndex idx = idxs.get(idxName);
 
                 if (idx == null) {
-                    idx = new LinkedHashMap<>();
+                    idx = new QueryIndex();
+                    idx.setName(idxName);
+                    idx.setIndexType(QueryIndexType.SORTED);
+                    idx.setFields(new LinkedHashMap<String, Boolean>());
 
                     idxs.put(idxName, idx);
                 }
@@ -278,11 +284,11 @@ public class OracleMetadataDialect extends DatabaseMetadataDialect {
 
                 String col = expr == null ? idxsRs.getString(IDX_COL_NAME_IDX) : expr.replaceAll("\"", "");
 
-                idx.put(col, "DESC".equals(idxsRs.getString(IDX_COL_DESCEND_IDX)));
+                idx.getFields().put(col, !"DESC".equals(idxsRs.getString(IDX_COL_DESCEND_IDX)));
             }
         }
 
-        return idxs;
+        return idxs.values();
     }
 
     /** {@inheritDoc} */
@@ -294,7 +300,7 @@ public class OracleMetadataDialect extends DatabaseMetadataDialect {
 
         PreparedStatement idxStmt = conn.prepareStatement(SQL_INDEXES);
 
-        if (schemas.size() == 0)
+        if (schemas.isEmpty())
             schemas.add(null);
 
         Set<String> sysSchemas = systemSchemas();
@@ -307,7 +313,7 @@ public class OracleMetadataDialect extends DatabaseMetadataDialect {
                 Collection<DbColumn> cols = new ArrayList<>();
 
                 Set<String> pkCols = Collections.emptySet();
-                Map<String, Map<String, Boolean>> idxs = Collections.emptyMap();
+                Collection<QueryIndex> idxs = Collections.emptyList();
 
                 String sql = String.format(SQL_COLUMNS,
                         tblsOnly ? "INNER JOIN all_tables b on a.table_name = b.table_name and a.owner = b.owner" : "",

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/ModalDialog.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/ModalDialog.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/ModalDialog.java
index 29c8a49..a1e34a1 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/ModalDialog.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/ModalDialog.java
@@ -34,8 +34,8 @@ public abstract class ModalDialog extends Stage {
     protected ModalDialog(Stage owner, int width, int height) {
         this.owner = owner;
 
-        this.setWidth(width);
-        this.setHeight(height);
+        setWidth(width);
+        setHeight(height);
     }
 
     /**
@@ -47,4 +47,4 @@ public abstract class ModalDialog extends Stage {
 
         showAndWait();
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/SchemaImportApp.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/SchemaImportApp.java b/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/SchemaImportApp.java
index 4f419e6..495c316 100644
--- a/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/SchemaImportApp.java
+++ b/modules/schema-import/src/main/java/org/apache/ignite/schema/ui/SchemaImportApp.java
@@ -313,6 +313,7 @@ public class SchemaImportApp extends Application {
     /** */
     private ProgressIndicator pi;
 
+    /** */
     private ObservableList<SchemaDescriptor> schemas = FXCollections.emptyObservableList();
 
     /** List with POJOs descriptors. */
@@ -425,7 +426,7 @@ public class SchemaImportApp extends Application {
             if (schema.selected().getValue())
                 selSchemas.add(schema.schema());
 
-        if (selSchemas.size() == 0)
+        if (selSchemas.isEmpty())
             if (!MessageBox.confirmDialog(owner, "No schemas selected.\nExtract tables for all available schemas?"))
                 return;
 
@@ -944,8 +945,8 @@ public class SchemaImportApp extends Application {
         schemaPnl.wrap();
 
         schemaPnl.add(button("Load schemas", "Load schemas for specified database", new EventHandler<ActionEvent>() {
-            @Override
-            public void handle(ActionEvent evt) {
+            /** {@inheritDoc} */
+            @Override public void handle(ActionEvent evt) {
                 loadSchemas();
             }
         }));
@@ -1827,8 +1828,8 @@ public class SchemaImportApp extends Application {
      * Special list view cell to select loaded schemas.
      */
     private static class SchemaCell implements Callback<SchemaDescriptor, ObservableValue<Boolean>> {
-        @Override
-        public ObservableValue<Boolean> call(SchemaDescriptor item) {
+        /** {@inheritDoc} */
+        @Override public ObservableValue<Boolean> call(SchemaDescriptor item) {
             return item.selected();
         }
     }
@@ -1947,4 +1948,4 @@ public class SchemaImportApp extends Application {
             }
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/d71f6129/modules/schema-import/src/test/java/org/apache/ignite/schema/test/AbstractSchemaImportTest.java
----------------------------------------------------------------------
diff --git a/modules/schema-import/src/test/java/org/apache/ignite/schema/test/AbstractSchemaImportTest.java b/modules/schema-import/src/test/java/org/apache/ignite/schema/test/AbstractSchemaImportTest.java
index 8c2b046..bf0f94a 100644
--- a/modules/schema-import/src/test/java/org/apache/ignite/schema/test/AbstractSchemaImportTest.java
+++ b/modules/schema-import/src/test/java/org/apache/ignite/schema/test/AbstractSchemaImportTest.java
@@ -48,7 +48,7 @@ public abstract class AbstractSchemaImportTest extends TestCase {
     protected static final String OUT_DIR_PATH = System.getProperty("java.io.tmpdir") + "/ignite-schema-import/out";
 
     /** Auto confirmation of file conflicts. */
-    protected ConfirmCallable askOverwrite = new ConfirmCallable(null, "") {
+    protected final ConfirmCallable askOverwrite = new ConfirmCallable(null, "") {
         @Override public MessageBox.Result confirm(String msg) {
             return YES_TO_ALL;
         }
@@ -141,4 +141,4 @@ public abstract class AbstractSchemaImportTest extends TestCase {
             return false;
         }
     }
-}
\ No newline at end of file
+}


[22/25] ignite git commit: Fixed javadoc and Java 1.8 build.

Posted by ag...@apache.org.
Fixed javadoc and Java 1.8 build.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/eee3b21b
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/eee3b21b
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/eee3b21b

Branch: refs/heads/ignite-1282
Commit: eee3b21b10a2ddbc131254fd2d077d95db9340de
Parents: ca0de93
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 12:35:43 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 12:35:43 2015 +0300

----------------------------------------------------------------------
 .../java/org/apache/ignite/IgniteBinary.java    |  6 +----
 .../apache/ignite/binary/BinaryIdMapper.java    |  2 +-
 .../apache/ignite/binary/BinarySerializer.java  |  2 +-
 .../internal/portable/BinaryTypeImpl.java       |  4 ++-
 .../processors/cache/CacheObjectContext.java    | 26 +++++++++++++++++---
 .../datastructures/GridCacheAtomicLongImpl.java |  6 ++++-
 .../portable/BinaryMarshallerSelfTest.java      |  6 ++---
 7 files changed, 36 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java b/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java
index 5eb2670..8802c10 100644
--- a/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java
+++ b/modules/core/src/main/java/org/apache/ignite/IgniteBinary.java
@@ -67,9 +67,7 @@ import org.jetbrains.annotations.Nullable;
  * String field = val.field("myFieldName");
  * </pre>
  * Alternatively, if we have class definitions in the classpath, we may choose to work with deserialized
- * typed objects at all times. In this case we do incur the deserialization cost. However, if
- * {@link org.apache.ignite.marshaller.portable.BinaryMarshaller#isKeepDeserialized()} is {@code true} then Ignite will only deserialize on the first access
- * and will cache the deserialized object, so it does not have to be deserialized again:
+ * typed objects at all times.
  * <pre name=code class=java>
  * IgniteCache&lt;MyKey.class, MyValue.class&gt; cache = grid.cache(null);
  *
@@ -150,8 +148,6 @@ import org.jetbrains.annotations.Nullable;
  * automatically.
  * <h1 class="header">Configuration</h1>
  * By default all your objects are considered as binary and no specific configuration is needed.
- * However, in some cases, like when an object is used by both Java and .Net, you may need to specify portable objects
- * explicitly by calling {@link org.apache.ignite.marshaller.portable.BinaryMarshaller#setClassNames(Collection)}.
  * The only requirement Ignite imposes is that your object has an empty
  * constructor. Note, that since server side does not have to know the class definition,
  * you only need to list portable objects in configuration on the client side. However, if you

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/binary/BinaryIdMapper.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/binary/BinaryIdMapper.java b/modules/core/src/main/java/org/apache/ignite/binary/BinaryIdMapper.java
index a4a18f6..9aebf85 100644
--- a/modules/core/src/main/java/org/apache/ignite/binary/BinaryIdMapper.java
+++ b/modules/core/src/main/java/org/apache/ignite/binary/BinaryIdMapper.java
@@ -27,7 +27,7 @@ package org.apache.ignite.binary;
  * actually do collide {@code BinaryIdMapper} allows to override the automatically
  * generated hash code IDs for the type and field names.
  * <p>
- * Binary ID mapper can be configured for all binary objects via {@link org.apache.ignite.marshaller.portable.BinaryMarshaller#getIdMapper()} method,
+ * Binary ID mapper can be configured for all binary objects via {@link org.apache.ignite.configuration.BinaryConfiguration#getIdMapper()} method,
  * or for a specific binary type via {@link BinaryTypeConfiguration#getIdMapper()} method.
  */
 public interface BinaryIdMapper {

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/binary/BinarySerializer.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/binary/BinarySerializer.java b/modules/core/src/main/java/org/apache/ignite/binary/BinarySerializer.java
index 31b3d30..3a8be3e 100644
--- a/modules/core/src/main/java/org/apache/ignite/binary/BinarySerializer.java
+++ b/modules/core/src/main/java/org/apache/ignite/binary/BinarySerializer.java
@@ -23,7 +23,7 @@ package org.apache.ignite.binary;
  * cannot be changed directly.
  * <p>
  * Binary serializer can be configured for all binary objects via
- * {@link org.apache.ignite.marshaller.portable.BinaryMarshaller#getSerializer()} method, or for a specific
+ * {@link org.apache.ignite.configuration.BinaryConfiguration#getSerializer()} method, or for a specific
  * binary type via {@link BinaryTypeConfiguration#getSerializer()} method.
  */
 public interface BinarySerializer {

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryTypeImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryTypeImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryTypeImpl.java
index 2630a40..5f2a91e 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryTypeImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/portable/BinaryTypeImpl.java
@@ -67,7 +67,9 @@ public class BinaryTypeImpl implements BinaryType {
         return meta.affinityKeyFieldName();
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @return Portable context.
+     */
     public PortableContext context() {
         return ctx;
     }

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java
index d49a029..b3d2d4e 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheObjectContext.java
@@ -123,12 +123,21 @@ import org.apache.ignite.internal.util.typedef.F;
         return proc;
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @param o Object to unwrap.
+     * @param keepPortable Keep binary flag.
+     * @return Unwrapped object.
+     */
     public Object unwrapPortableIfNeeded(Object o, boolean keepPortable) {
         return unwrapPortableIfNeeded(o, keepPortable, true);
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @param o Object to unwrap.
+     * @param keepPortable Keep binary flag.
+     * @param cpy Copy value flag.
+     * @return Unwrapped object.
+     */
     public Object unwrapPortableIfNeeded(Object o, boolean keepPortable, boolean cpy) {
         if (o == null)
             return null;
@@ -136,12 +145,21 @@ import org.apache.ignite.internal.util.typedef.F;
         return unwrapPortable(o, keepPortable, cpy);
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @param col Collection of objects to unwrap.
+     * @param keepPortable Keep binary flag.
+     * @return Unwrapped collection.
+     */
     public Collection<Object> unwrapPortablesIfNeeded(Collection<Object> col, boolean keepPortable) {
         return unwrapPortablesIfNeeded(col, keepPortable, true);
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @param col Collection to unwrap.
+     * @param keepPortable Keep binary flag.
+     * @param cpy Copy value flag.
+     * @return Unwrapped collection.
+     */
     public Collection<Object> unwrapPortablesIfNeeded(Collection<Object> col, boolean keepPortable, boolean cpy) {
         if (col instanceof ArrayList)
             return unwrapPortables((ArrayList<Object>)col, keepPortable, cpy);

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheAtomicLongImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheAtomicLongImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheAtomicLongImpl.java
index 4169e5e..64fbcde 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheAtomicLongImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/datastructures/GridCacheAtomicLongImpl.java
@@ -339,7 +339,11 @@ public final class GridCacheAtomicLongImpl implements GridCacheAtomicLongEx, Ext
         }
     }
 
-    /** {@inheritDoc} */
+    /**
+     * @param expVal Expected value.
+     * @param newVal New value.
+     * @return Old value.
+     */
     public long compareAndSetAndGet(long expVal, long newVal) {
         checkRemoved();
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/eee3b21b/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java
index 4584575..19c40b3 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/portable/BinaryMarshallerSelfTest.java
@@ -795,7 +795,7 @@ public class BinaryMarshallerSelfTest extends GridCommonAbstractTest {
         BinaryObjectEx po1 = marshal(obj1, marsh);
 
         assertEquals(11111, po1.typeId());
-        assertEquals(10, po1.field(22222));
+        assertEquals((Integer)10, po1.field(22222));
         assertEquals("str", po1.field(33333));
 
         assertEquals(10, po1.<CustomMappedObject1>deserialize().val1);
@@ -852,7 +852,7 @@ public class BinaryMarshallerSelfTest extends GridCommonAbstractTest {
         BinaryObjectEx po1 = marshal(obj1, marsh);
 
         assertEquals(11111, po1.typeId());
-        assertEquals(10, po1.field(22222));
+        assertEquals((Integer)10, po1.field(22222));
         assertEquals("str1", po1.field(33333));
 
         assertEquals(10, po1.<CustomMappedObject1>deserialize().val1);
@@ -863,7 +863,7 @@ public class BinaryMarshallerSelfTest extends GridCommonAbstractTest {
         BinaryObjectEx po2 = marshal(obj2, marsh);
 
         assertEquals(44444, po2.typeId());
-        assertEquals(20, po2.field(55555));
+        assertEquals((Integer)20, po2.field(55555));
         assertEquals("str2", po2.field(66666));
 
         assertEquals(20, po2.<CustomMappedObject2>deserialize().val1);
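
A note on the casts added above: field() is generically typed, so with a bare literal the
call can, depending on how the type argument is inferred, bind to a primitive overload such
as assertEquals(long, long). Casting the expected value to Integer pins the call to
assertEquals(Object, Object), so two boxed Integers are compared. A stand-alone sketch of
the idea (JUnit 4 assumed; not the Ignite test itself):

    import static org.junit.Assert.assertEquals;

    public class AssertCastSketch {
        public static void main(String[] args) {
            Object actual = Integer.valueOf(10);   // e.g. what a generic accessor returns
            assertEquals((Integer) 10, actual);    // Integer.equals(Integer) -> passes
        }
    }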


[20/25] ignite git commit: IGNITE-1858: .NET: Updated project settings to support VS2010-VS2015.

Posted by ag...@apache.org.
IGNITE-1858: .NET: Updated project settings to support VS2010-VS2015.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/11369331
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/11369331
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/11369331

Branch: refs/heads/ignite-1282
Commit: 11369331955070758162fbbd3904552eef390049
Parents: 512fe6b
Author: Pavel Tupitsyn <pt...@gridgain.com>
Authored: Tue Nov 24 11:57:39 2015 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Tue Nov 24 11:57:39 2015 +0300

----------------------------------------------------------------------
 .../Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj | 3 +++
 .../Apache.Ignite.Core.Tests.TestDll.csproj                  | 2 ++
 .../Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj | 2 ++
 .../dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj      | 2 ++
 .../dotnet/Apache.Ignite.Core/Impl/Events/Events.cs          | 8 +++++---
 modules/platforms/dotnet/Apache.Ignite.sln                   | 6 ++----
 modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj  | 2 ++
 .../Apache.Ignite.Examples/Apache.Ignite.Examples.csproj     | 2 ++
 .../Apache.Ignite.ExamplesDll.csproj                         | 2 ++
 9 files changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj b/modules/platforms/dotnet/Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj
index 14b6466..f4c7894 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj
+++ b/modules/platforms/dotnet/Apache.Ignite.Benchmarks/Apache.Ignite.Benchmarks.csproj
@@ -19,10 +19,12 @@
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Debug\</OutputPath>
     <DefineConstants>DEBUG</DefineConstants>
+    <LangVersion>4</LangVersion>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
     <PlatformTarget>x64</PlatformTarget>
@@ -32,6 +34,7 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
     <PlatformTarget>x64</PlatformTarget>
     <OutputPath>bin\x64\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.Core.Tests.TestDll/Apache.Ignite.Core.Tests.TestDll.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests.TestDll/Apache.Ignite.Core.Tests.TestDll.csproj b/modules/platforms/dotnet/Apache.Ignite.Core.Tests.TestDll/Apache.Ignite.Core.Tests.TestDll.csproj
index c333bda..ebc8043 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests.TestDll/Apache.Ignite.Core.Tests.TestDll.csproj
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests.TestDll/Apache.Ignite.Core.Tests.TestDll.csproj
@@ -19,6 +19,7 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
     <PlatformTarget>x64</PlatformTarget>
@@ -27,6 +28,7 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
     <PlatformTarget>x64</PlatformTarget>
     <OutputPath>bin\x64\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj
index a5bb7a8..72c0210 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Apache.Ignite.Core.Tests.csproj
@@ -25,6 +25,7 @@
     <PlatformTarget>x64</PlatformTarget>
     <OutputPath>bin\x64\Release\</OutputPath>
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
     <PlatformTarget>x86</PlatformTarget>
@@ -36,6 +37,7 @@
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Release\</OutputPath>
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj b/modules/platforms/dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj
index d7db33e..ae10159 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj
+++ b/modules/platforms/dotnet/Apache.Ignite.Core/Apache.Ignite.Core.csproj
@@ -22,6 +22,7 @@
     <PlatformTarget>x64</PlatformTarget>
     <OutputPath>bin\x64\Release\</OutputPath>
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
     <PlatformTarget>x86</PlatformTarget>
@@ -33,6 +34,7 @@
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Release\</OutputPath>
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Events/Events.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Events/Events.cs b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Events/Events.cs
index 098102a..783ba94 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Events/Events.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core/Impl/Events/Events.cs
@@ -134,8 +134,9 @@ namespace Apache.Ignite.Core.Impl.Events
         {
             AsyncInstance.RemoteQuery(filter, timeout, types);
 
-            return GetFuture((futId, futTyp) => UU.TargetListenFutureForOperation(AsyncInstance.Target, futId, futTyp,
-                (int) Op.RemoteQuery), convertFunc: ReadEvents<T>).Task;
+            // ReSharper disable once RedundantTypeArgumentsOfMethod (won't compile in VS2010)
+            return GetFuture<ICollection<T>>((futId, futTyp) => UU.TargetListenFutureForOperation(AsyncInstance.Target, 
+                futId, futTyp, (int) Op.RemoteQuery), convertFunc: ReadEvents<T>).Task;
         }
 
         /** <inheritDoc /> */
@@ -251,7 +252,8 @@ namespace Apache.Ignite.Core.Impl.Events
             {
                 AsyncInstance.WaitForLocal0(filter, ref hnd, types);
 
-                var fut = GetFuture((futId, futTyp) => UU.TargetListenFutureForOperation(AsyncInstance.Target, futId,
+                // ReSharper disable once RedundantTypeArgumentsOfMethod (won't compile in VS2010)
+                var fut = GetFuture<T>((futId, futTyp) => UU.TargetListenFutureForOperation(AsyncInstance.Target, futId,
                     futTyp, (int) Op.WaitForLocal), convertFunc: reader => (T) EventReader.Read<IEvent>(reader));
 
                 if (filter != null)
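
For readers unfamiliar with the VS2010 workaround above: the change simply spells out the
generic type argument (GetFuture<ICollection<T>>, GetFuture<T>) instead of relying on the
compiler to infer it, which the older C# 4 compiler cannot do here, as the added comments
note. The same "explicit type argument" escape hatch exists in Java; a rough, hypothetical
analogue for illustration only (not the .NET API):

    import java.util.Collections;
    import java.util.List;

    public class ExplicitTypeArgsSketch {
        // Generic factory whose type parameter may not be inferable from the arguments alone.
        static <T> List<T> emptyOf() {
            return Collections.emptyList();
        }

        public static void main(String[] args) {
            // Explicit type argument instead of relying on inference:
            List<String> explicit = ExplicitTypeArgsSketch.<String>emptyOf();
            System.out.println(explicit.size());
        }
    }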

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite.sln
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.sln b/modules/platforms/dotnet/Apache.Ignite.sln
index 6b29e9d..929af73 100644
--- a/modules/platforms/dotnet/Apache.Ignite.sln
+++ b/modules/platforms/dotnet/Apache.Ignite.sln
@@ -1,8 +1,6 @@
 
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 2013
-VisualStudioVersion = 12.0.31101.0
-MinimumVisualStudioVersion = 10.0.40219.1
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Apache.Ignite.Core", "Apache.Ignite.Core\Apache.Ignite.Core.csproj", "{4CD2F726-7E2B-46C4-A5BA-057BB82EECB6}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Apache.Ignite.Core.Tests", "Apache.Ignite.Core.Tests\Apache.Ignite.Core.Tests.csproj", "{6A62F66C-DA5B-4FBB-8CE7-A95F740FDC7A}"

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj b/modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj
index a58a660..fa7d6cf 100644
--- a/modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj
+++ b/modules/platforms/dotnet/Apache.Ignite/Apache.Ignite.csproj
@@ -19,6 +19,7 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
     <PlatformTarget>x64</PlatformTarget>
     <OutputPath>bin\x64\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
     <PlatformTarget>x86</PlatformTarget>
@@ -27,6 +28,7 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
     <PlatformTarget>x86</PlatformTarget>
     <OutputPath>bin\x86\Release\</OutputPath>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/examples/Apache.Ignite.Examples/Apache.Ignite.Examples.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/examples/Apache.Ignite.Examples/Apache.Ignite.Examples.csproj b/modules/platforms/dotnet/examples/Apache.Ignite.Examples/Apache.Ignite.Examples.csproj
index e28ab75..a693930 100644
--- a/modules/platforms/dotnet/examples/Apache.Ignite.Examples/Apache.Ignite.Examples.csproj
+++ b/modules/platforms/dotnet/examples/Apache.Ignite.Examples/Apache.Ignite.Examples.csproj
@@ -21,6 +21,7 @@
     <OutputPath>bin\x64\Release\</OutputPath>
     <DebugType>full</DebugType>
     <DebugSymbols>true</DebugSymbols>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <StartupObject>Apache.Ignite.Examples.Compute.TaskExample</StartupObject>
@@ -35,6 +36,7 @@
     <OutputPath>bin\x86\Release\</OutputPath>
     <PlatformTarget>x86</PlatformTarget>
     <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>

http://git-wip-us.apache.org/repos/asf/ignite/blob/11369331/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Apache.Ignite.ExamplesDll.csproj
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Apache.Ignite.ExamplesDll.csproj b/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Apache.Ignite.ExamplesDll.csproj
index 60e1ec7..59d2866 100644
--- a/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Apache.Ignite.ExamplesDll.csproj
+++ b/modules/platforms/dotnet/examples/Apache.Ignite.ExamplesDll/Apache.Ignite.ExamplesDll.csproj
@@ -21,6 +21,7 @@
     <OutputPath>bin\x64\Release\</OutputPath>
     <DebugType>full</DebugType>
     <DebugSymbols>true</DebugSymbols>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
     <DebugSymbols>true</DebugSymbols>
@@ -32,6 +33,7 @@
     <OutputPath>bin\x86\Release\</OutputPath>
     <PlatformTarget>x86</PlatformTarget>
     <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
+    <Optimize>true</Optimize>
   </PropertyGroup>
   <PropertyGroup>
     <SignAssembly>true</SignAssembly>


[18/25] ignite git commit: Merge branch 'ignite-1.5' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282

Posted by ag...@apache.org.
Merge branch 'ignite-1.5' of https://git-wip-us.apache.org/repos/asf/ignite into ignite-1282


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/e52b267b
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/e52b267b
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/e52b267b

Branch: refs/heads/ignite-1282
Commit: e52b267bdf09be4b1661fe2d751e5505cac941ad
Parents: 9922d83 dcbfbd2
Author: Alexey Goncharuk <al...@gmail.com>
Authored: Tue Nov 24 11:06:56 2015 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Tue Nov 24 11:06:56 2015 +0300

----------------------------------------------------------------------
 .../Continuous/ContinuousQueryAbstractTest.cs   | 26 ++++++++++++--------
 1 file changed, 16 insertions(+), 10 deletions(-)
----------------------------------------------------------------------