You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ignite.apache.org by sb...@apache.org on 2017/11/13 09:51:55 UTC
[01/28] ignite git commit: Fixed test to stop nodes afterTest.
Repository: ignite
Updated Branches:
refs/heads/ignite-zk 246478186 -> 740c3b24f
Fixed test to stop nodes afterTest.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/c162790d
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/c162790d
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/c162790d
Branch: refs/heads/ignite-zk
Commit: c162790d6205f7b62cbcc47047aa7ad2993cca75
Parents: f52f8f9
Author: sboikov <sb...@gridgain.com>
Authored: Wed Nov 8 14:30:34 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Wed Nov 8 14:30:34 2017 +0300
----------------------------------------------------------------------
.../spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java | 7 +++++++
1 file changed, 7 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/c162790d/modules/core/src/test/java/org/apache/ignite/spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java b/modules/core/src/test/java/org/apache/ignite/spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java
index c238a9a..b5cd9e1 100644
--- a/modules/core/src/test/java/org/apache/ignite/spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/spi/discovery/IgniteDiscoveryCacheReuseSelfTest.java
@@ -46,6 +46,13 @@ public class IgniteDiscoveryCacheReuseSelfTest extends GridCommonAbstractTest {
return cfg;
}
+ /** {@inheritDoc} */
+ @Override protected void afterTest() throws Exception {
+ stopAllGrids();
+
+ super.afterTest();
+ }
+
/**
* Tests correct reuse of discovery cache.
*
[22/28] ignite git commit: IGNITE-6859 Don't register "cols"
directive by its function name.
Posted by sb...@apache.org.
IGNITE-6859 Don't register "cols" directive by its function name.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/78a8403e
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/78a8403e
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/78a8403e
Branch: refs/heads/ignite-zk
Commit: 78a8403e77d9ca13180e908627af6943b491b855
Parents: 131d80d
Author: Ilya Borisov <ib...@gridgain.com>
Authored: Mon Nov 13 13:20:03 2017 +0700
Committer: Andrey Novikov <an...@gridgain.com>
Committed: Mon Nov 13 13:20:03 2017 +0700
----------------------------------------------------------------------
.../list-editable/components/list-editable-cols/index.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/78a8403e/modules/web-console/frontend/app/components/list-editable/components/list-editable-cols/index.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/components/list-editable/components/list-editable-cols/index.js b/modules/web-console/frontend/app/components/list-editable/components/list-editable-cols/index.js
index b7b55f6..e0d4b61 100644
--- a/modules/web-console/frontend/app/components/list-editable/components/list-editable-cols/index.js
+++ b/modules/web-console/frontend/app/components/list-editable/components/list-editable-cols/index.js
@@ -23,6 +23,6 @@ import row from './row.directive.js';
export default angular
.module('list-editable-cols', [
])
- .directive(cols.name, cols)
+ .directive('listEditableCols', cols)
.directive('listEditableItemView', row)
.directive('listEditableItemEdit', row);
[02/28] ignite git commit: IGNITE-6737 Fixed license headers - Fixes
#2999.
Posted by sb...@apache.org.
IGNITE-6737 Fixed license headers - Fixes #2999.
Signed-off-by: Alexey Goncharuk <al...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/ca6a0098
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/ca6a0098
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/ca6a0098
Branch: refs/heads/ignite-zk
Commit: ca6a009895ed756d40d21b1b9aa7b0456614314e
Parents: c162790
Author: vd-pyatkov <vp...@gridgain.com>
Authored: Wed Nov 8 15:54:14 2017 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Wed Nov 8 15:54:14 2017 +0300
----------------------------------------------------------------------
.../p2p/DeploymentClassLoaderCallableTest.java | 17 +++++++++++++++++
.../ignite/tests/p2p/compute/ExternalCallable.java | 17 +++++++++++++++++
.../tests/p2p/compute/ExternalCallable1.java | 17 +++++++++++++++++
.../tests/p2p/compute/ExternalCallable2.java | 17 +++++++++++++++++
4 files changed, 68 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/ca6a0098/modules/core/src/test/java/org/apache/ignite/p2p/DeploymentClassLoaderCallableTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/p2p/DeploymentClassLoaderCallableTest.java b/modules/core/src/test/java/org/apache/ignite/p2p/DeploymentClassLoaderCallableTest.java
index a9cec68..9c0e446 100644
--- a/modules/core/src/test/java/org/apache/ignite/p2p/DeploymentClassLoaderCallableTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/p2p/DeploymentClassLoaderCallableTest.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.ignite.p2p;
import java.lang.reflect.Constructor;
http://git-wip-us.apache.org/repos/asf/ignite/blob/ca6a0098/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable.java
----------------------------------------------------------------------
diff --git a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable.java b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable.java
index 16ce493..25f1f3e 100644
--- a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable.java
+++ b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.ignite.tests.p2p.compute;
import org.apache.ignite.Ignite;
http://git-wip-us.apache.org/repos/asf/ignite/blob/ca6a0098/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable1.java
----------------------------------------------------------------------
diff --git a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable1.java b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable1.java
index f7bd8a7..6a6befc 100644
--- a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable1.java
+++ b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable1.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.ignite.tests.p2p.compute;
import org.apache.ignite.Ignite;
http://git-wip-us.apache.org/repos/asf/ignite/blob/ca6a0098/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable2.java
----------------------------------------------------------------------
diff --git a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable2.java b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable2.java
index 7a09f9d..7d1d0f7 100644
--- a/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable2.java
+++ b/modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/compute/ExternalCallable2.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.ignite.tests.p2p.compute;
import org.apache.ignite.Ignite;
[14/28] ignite git commit: IGNITE-6841: Increased ODBC protocol
version for multiple statements. This closes #2995
Posted by sb...@apache.org.
IGNITE-6841: Increased ODBC protocol version for multiple statements.
This closes #2995
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/954e47b9
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/954e47b9
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/954e47b9
Branch: refs/heads/ignite-zk
Commit: 954e47b9cf37dc24a7c859f7eda9af41b6d7d1e7
Parents: c11fc41
Author: Igor Sapego <is...@gridgain.com>
Authored: Fri Nov 10 13:12:24 2017 +0300
Committer: Igor Sapego <is...@gridgain.com>
Committed: Fri Nov 10 13:12:24 2017 +0300
----------------------------------------------------------------------
.../processors/odbc/odbc/OdbcConnectionContext.java | 6 +++++-
.../internal/processors/odbc/odbc/OdbcMessageParser.java | 2 +-
modules/platforms/cpp/odbc-test/src/queries_test.cpp | 8 ++++++++
.../cpp/odbc/include/ignite/odbc/protocol_version.h | 9 ++++++++-
modules/platforms/cpp/odbc/src/message.cpp | 2 +-
modules/platforms/cpp/odbc/src/protocol_version.cpp | 4 +++-
6 files changed, 26 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcConnectionContext.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcConnectionContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcConnectionContext.java
index 88a2e0f..6a922bf 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcConnectionContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcConnectionContext.java
@@ -40,8 +40,11 @@ public class OdbcConnectionContext implements ClientListenerConnectionContext {
/** Version 2.3.0: added "skipReducerOnUpdate" flag. */
public static final ClientListenerProtocolVersion VER_2_3_0 = ClientListenerProtocolVersion.create(2, 3, 0);
+ /** Version 2.3.2: added multiple statements support. */
+ public static final ClientListenerProtocolVersion VER_2_3_2 = ClientListenerProtocolVersion.create(2, 3, 2);
+
/** Current version. */
- private static final ClientListenerProtocolVersion CURRENT_VER = VER_2_3_0;
+ private static final ClientListenerProtocolVersion CURRENT_VER = VER_2_3_2;
/** Supported versions. */
private static final Set<ClientListenerProtocolVersion> SUPPORTED_VERS = new HashSet<>();
@@ -63,6 +66,7 @@ public class OdbcConnectionContext implements ClientListenerConnectionContext {
static {
SUPPORTED_VERS.add(CURRENT_VER);
+ SUPPORTED_VERS.add(VER_2_3_0);
SUPPORTED_VERS.add(VER_2_1_5);
SUPPORTED_VERS.add(VER_2_1_0);
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcMessageParser.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcMessageParser.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcMessageParser.java
index bf74bc5..fb17d2a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcMessageParser.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcMessageParser.java
@@ -362,7 +362,7 @@ public class OdbcMessageParser implements ClientListenerMessageParser {
* @param affectedRows Affected rows.
*/
private void writeAffectedRows(BinaryWriterExImpl writer, Collection<Long> affectedRows) {
- if (ver.compareTo(OdbcConnectionContext.VER_2_3_0) < 0) {
+ if (ver.compareTo(OdbcConnectionContext.VER_2_3_2) < 0) {
long summ = 0;
for (Long value : affectedRows)
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/platforms/cpp/odbc-test/src/queries_test.cpp
----------------------------------------------------------------------
diff --git a/modules/platforms/cpp/odbc-test/src/queries_test.cpp b/modules/platforms/cpp/odbc-test/src/queries_test.cpp
index 0b0bf82..6fcf7c9 100644
--- a/modules/platforms/cpp/odbc-test/src/queries_test.cpp
+++ b/modules/platforms/cpp/odbc-test/src/queries_test.cpp
@@ -771,6 +771,14 @@ BOOST_AUTO_TEST_CASE(TestConnectionProtocolVersion_2_3_0)
InsertTestBatch(11, 20, 9);
}
+BOOST_AUTO_TEST_CASE(TestConnectionProtocolVersion_2_3_2)
+{
+ Connect("DRIVER={Apache Ignite};ADDRESS=127.0.0.1:11110;SCHEMA=cache;PROTOCOL_VERSION=2.3.2");
+
+ InsertTestStrings(10, false);
+ InsertTestBatch(11, 20, 9);
+}
+
BOOST_AUTO_TEST_CASE(TestTwoRowsInt8)
{
CheckTwoRowsInt<signed char>(SQL_C_STINYINT);
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/platforms/cpp/odbc/include/ignite/odbc/protocol_version.h
----------------------------------------------------------------------
diff --git a/modules/platforms/cpp/odbc/include/ignite/odbc/protocol_version.h b/modules/platforms/cpp/odbc/include/ignite/odbc/protocol_version.h
index e6088a7..f39c11e 100644
--- a/modules/platforms/cpp/odbc/include/ignite/odbc/protocol_version.h
+++ b/modules/platforms/cpp/odbc/include/ignite/odbc/protocol_version.h
@@ -31,11 +31,18 @@ namespace ignite
class ProtocolVersion
{
public:
- /** Current protocol version. */
+ /** Version 2.1.0. */
static const ProtocolVersion VERSION_2_1_0;
+
+ /** Version 2.1.5: added "lazy" flag. */
static const ProtocolVersion VERSION_2_1_5;
+
+ /** Version 2.3.0: added "skipReducerOnUpdate" flag. */
static const ProtocolVersion VERSION_2_3_0;
+ /** Version 2.3.2: added multiple statements support. */
+ static const ProtocolVersion VERSION_2_3_2;
+
typedef std::set<ProtocolVersion> VersionSet;
/**
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/platforms/cpp/odbc/src/message.cpp
----------------------------------------------------------------------
diff --git a/modules/platforms/cpp/odbc/src/message.cpp b/modules/platforms/cpp/odbc/src/message.cpp
index 5595ddb..32a5d91 100644
--- a/modules/platforms/cpp/odbc/src/message.cpp
+++ b/modules/platforms/cpp/odbc/src/message.cpp
@@ -28,7 +28,7 @@ namespace
{
affectedRows.clear();
- if (protocolVersion < ProtocolVersion::VERSION_2_3_0)
+ if (protocolVersion < ProtocolVersion::VERSION_2_3_2)
affectedRows.push_back(reader.ReadInt64());
else
{
http://git-wip-us.apache.org/repos/asf/ignite/blob/954e47b9/modules/platforms/cpp/odbc/src/protocol_version.cpp
----------------------------------------------------------------------
diff --git a/modules/platforms/cpp/odbc/src/protocol_version.cpp b/modules/platforms/cpp/odbc/src/protocol_version.cpp
index b0b9121..d7d85ad 100644
--- a/modules/platforms/cpp/odbc/src/protocol_version.cpp
+++ b/modules/platforms/cpp/odbc/src/protocol_version.cpp
@@ -29,11 +29,13 @@ namespace ignite
const ProtocolVersion ProtocolVersion::VERSION_2_1_0(2, 1, 0);
const ProtocolVersion ProtocolVersion::VERSION_2_1_5(2, 1, 5);
const ProtocolVersion ProtocolVersion::VERSION_2_3_0(2, 3, 0);
+ const ProtocolVersion ProtocolVersion::VERSION_2_3_2(2, 3, 2);
ProtocolVersion::VersionSet::value_type supportedArray[] = {
ProtocolVersion::VERSION_2_1_0,
ProtocolVersion::VERSION_2_1_5,
ProtocolVersion::VERSION_2_3_0,
+ ProtocolVersion::VERSION_2_3_2,
};
const ProtocolVersion::VersionSet ProtocolVersion::supported(supportedArray,
@@ -62,7 +64,7 @@ namespace ignite
const ProtocolVersion& ProtocolVersion::GetCurrent()
{
- return VERSION_2_3_0;
+ return VERSION_2_3_2;
}
void ThrowParseError()
[27/28] ignite git commit: Merge remote-tracking branch
'remotes/origin/master' into ignite-zk
Posted by sb...@apache.org.
Merge remote-tracking branch 'remotes/origin/master' into ignite-zk
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/2b75ecfb
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/2b75ecfb
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/2b75ecfb
Branch: refs/heads/ignite-zk
Commit: 2b75ecfb9f1f490dbab907efd3aab8db9622b09c
Parents: 2464781 85027e7
Author: sboikov <sb...@gridgain.com>
Authored: Mon Nov 13 12:01:23 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Mon Nov 13 12:01:23 2017 +0300
----------------------------------------------------------------------
.gitignore | 2 +
examples/pom.xml | 5 +
.../examples/ml/math/trees/MNISTExample.java | 261 +++++
.../examples/ml/math/trees/package-info.java | 22 +
.../DummyPersistenceCompatibilityTest.java | 225 +++-
.../FoldersReuseCompatibilityTest.java | 48 +-
.../java/org/apache/ignite/cache/CacheMode.java | 2 +-
.../eviction/AbstractEvictionPolicyFactory.java | 104 ++
.../fifo/FifoEvictionPolicyFactory.java | 72 ++
.../eviction/lru/LruEvictionPolicyFactory.java | 72 ++
.../sorted/SortedEvictionPolicyFactory.java | 98 ++
.../configuration/CacheConfiguration.java | 42 +-
.../configuration/NearCacheConfiguration.java | 37 +
.../processors/cache/CacheGroupContext.java | 2 +-
.../processors/cache/ClusterCachesInfo.java | 7 +
.../cache/DynamicCacheChangeRequest.java | 11 +-
.../processors/cache/GridCacheAttributes.java | 19 +
.../cache/GridCacheEvictionManager.java | 15 +-
.../processors/cache/GridCacheProcessor.java | 26 +-
.../processors/cache/GridCacheUtils.java | 1 +
.../cache/IgniteCacheOffheapManager.java | 2 +-
.../distributed/near/GridNearCacheEntry.java | 2 +-
.../persistence/snapshot/SnapshotOperation.java | 7 +-
.../wal/reader/StandaloneGridKernalContext.java | 2 +-
.../store/GridCacheStoreManagerAdapter.java | 94 +-
.../processors/igfs/IgfsHelperImpl.java | 8 +-
.../internal/processors/igfs/IgfsImpl.java | 4 +-
.../odbc/odbc/OdbcConnectionContext.java | 6 +-
.../processors/odbc/odbc/OdbcMessageParser.java | 2 +-
.../apache/ignite/internal/sql/SqlKeyword.java | 237 ++++
.../apache/ignite/internal/sql/SqlLexer.java | 219 ++++
.../internal/sql/SqlLexerLookAheadToken.java | 75 ++
.../ignite/internal/sql/SqlLexerToken.java | 48 +
.../ignite/internal/sql/SqlLexerTokenType.java | 112 ++
.../ignite/internal/sql/SqlParseException.java | 99 ++
.../apache/ignite/internal/sql/SqlParser.java | 181 +++
.../ignite/internal/sql/SqlParserUtils.java | 356 ++++++
.../ignite/internal/sql/command/SqlCommand.java | 43 +
.../sql/command/SqlCreateIndexCommand.java | 214 ++++
.../sql/command/SqlDropIndexCommand.java | 80 ++
.../internal/sql/command/SqlIndexColumn.java | 61 +
.../internal/sql/command/SqlQualifiedName.java | 70 ++
.../ignite/internal/util/nio/GridNioServer.java | 8 +-
.../cache/VisorCacheNearConfiguration.java | 1 +
.../communication/tcp/TcpCommunicationSpi.java | 37 +-
...istenerRWThroughDisabledAtomicCacheTest.java | 33 +
...RWThroughDisabledTransactionalCacheTest.java | 138 +++
...ionListenerReadWriteThroughDisabledTest.java | 291 +++++
...reSessionListenerWriteBehindEnabledTest.java | 304 +++++
.../IgniteCommunicationBalanceTest.java | 13 +
.../IgniteCommunicationSslBalanceTest.java | 28 +
...idCacheConfigurationConsistencySelfTest.java | 52 +
.../near/GridCacheNearClientHitTest.java | 154 +++
.../GridCacheNearEvictionEventSelfTest.java | 5 -
.../EvictionPolicyFactoryAbstractTest.java | 1073 ++++++++++++++++++
.../fifo/FifoEvictionPolicyFactorySelfTest.java | 261 +++++
.../lru/LruEvictionPolicyFactorySelfTest.java | 352 ++++++
.../SortedEvictionPolicyFactorySelfTest.java | 264 +++++
.../db/wal/reader/IgniteWalReaderTest.java | 106 +-
.../internal/sql/SqlParserAbstractSelfTest.java | 46 +
.../sql/SqlParserCreateIndexSelfTest.java | 182 +++
.../sql/SqlParserDropIndexSelfTest.java | 99 ++
.../p2p/DeploymentClassLoaderCallableTest.java | 17 +
...ommunicationSpiHalfOpenedConnectionTest.java | 142 +++
.../IgniteDiscoveryCacheReuseSelfTest.java | 7 +
.../IgniteCacheEvictionSelfTestSuite.java | 6 +
.../ignite/testsuites/IgniteCacheTestSuite.java | 2 +
.../testsuites/IgniteCacheTestSuite2.java | 3 +-
.../testsuites/IgniteCacheTestSuite4.java | 8 +-
.../IgniteSpiCommunicationSelfTestSuite.java | 2 +
.../tests/p2p/compute/ExternalCallable.java | 17 +
.../tests/p2p/compute/ExternalCallable1.java | 17 +
.../tests/p2p/compute/ExternalCallable2.java | 17 +
.../processors/query/h2/IgniteH2Indexing.java | 67 +-
.../query/h2/ddl/DdlStatementsProcessor.java | 98 +-
.../IgniteCacheQuerySelfTestSuite.java | 5 +
modules/ml/licenses/netlib-java-bsd3.txt | 51 +
modules/ml/pom.xml | 12 +-
.../main/java/org/apache/ignite/ml/Model.java | 4 +-
.../main/java/org/apache/ignite/ml/Trainer.java | 30 +
.../clustering/KMeansDistributedClusterer.java | 19 +-
.../apache/ignite/ml/estimators/Estimators.java | 50 +
.../ignite/ml/estimators/package-info.java | 22 +
.../ignite/ml/math/distributed/CacheUtils.java | 192 +++-
.../math/distributed/keys/MatrixCacheKey.java | 6 +-
.../distributed/keys/impl/BlockMatrixKey.java | 17 +-
.../distributed/keys/impl/SparseMatrixKey.java | 59 +-
.../ignite/ml/math/functions/Functions.java | 38 +
.../ml/math/functions/IgniteBinaryOperator.java | 29 +
.../math/functions/IgniteCurriedBiFunction.java | 29 +
.../ml/math/functions/IgniteSupplier.java | 30 +
.../math/functions/IgniteToDoubleFunction.java | 25 +
.../matrix/SparseBlockDistributedMatrix.java | 4 +-
.../impls/matrix/SparseDistributedMatrix.java | 3 +-
.../storage/matrix/BlockMatrixStorage.java | 12 +-
.../impls/storage/matrix/MapWrapperStorage.java | 18 +-
.../matrix/SparseDistributedMatrixStorage.java | 17 +-
.../ml/math/impls/vector/MapWrapperVector.java | 8 +
.../ignite/ml/structures/LabeledVector.java | 63 +
.../ml/structures/LabeledVectorDouble.java | 46 +
.../ignite/ml/structures/package-info.java | 22 +
.../ignite/ml/trees/CategoricalRegionInfo.java | 72 ++
.../ignite/ml/trees/CategoricalSplitInfo.java | 68 ++
.../ignite/ml/trees/ContinuousRegionInfo.java | 74 ++
.../ml/trees/ContinuousSplitCalculator.java | 50 +
.../org/apache/ignite/ml/trees/RegionInfo.java | 62 +
.../ml/trees/models/DecisionTreeModel.java | 44 +
.../ignite/ml/trees/models/package-info.java | 22 +
.../ml/trees/nodes/CategoricalSplitNode.java | 50 +
.../ml/trees/nodes/ContinuousSplitNode.java | 56 +
.../ignite/ml/trees/nodes/DecisionTreeNode.java | 33 +
.../org/apache/ignite/ml/trees/nodes/Leaf.java | 49 +
.../apache/ignite/ml/trees/nodes/SplitNode.java | 100 ++
.../ignite/ml/trees/nodes/package-info.java | 22 +
.../apache/ignite/ml/trees/package-info.java | 22 +
.../ml/trees/trainers/columnbased/BiIndex.java | 113 ++
...exedCacheColumnDecisionTreeTrainerInput.java | 57 +
.../CacheColumnDecisionTreeTrainerInput.java | 142 +++
.../columnbased/ColumnDecisionTreeTrainer.java | 557 +++++++++
.../ColumnDecisionTreeTrainerInput.java | 55 +
.../MatrixColumnDecisionTreeTrainerInput.java | 82 ++
.../trainers/columnbased/RegionProjection.java | 109 ++
.../trainers/columnbased/TrainingContext.java | 166 +++
.../columnbased/caches/ContextCache.java | 68 ++
.../columnbased/caches/FeaturesCache.java | 151 +++
.../columnbased/caches/ProjectionsCache.java | 284 +++++
.../trainers/columnbased/caches/SplitCache.java | 206 ++++
.../ContinuousSplitCalculators.java | 34 +
.../contsplitcalcs/GiniSplitCalculator.java | 234 ++++
.../contsplitcalcs/VarianceSplitCalculator.java | 179 +++
.../contsplitcalcs/package-info.java | 22 +
.../trainers/columnbased/package-info.java | 22 +
.../columnbased/regcalcs/RegionCalculators.java | 85 ++
.../columnbased/regcalcs/package-info.java | 22 +
.../vectors/CategoricalFeatureProcessor.java | 211 ++++
.../vectors/ContinuousFeatureProcessor.java | 111 ++
.../vectors/ContinuousSplitInfo.java | 71 ++
.../columnbased/vectors/FeatureProcessor.java | 81 ++
.../vectors/FeatureVectorProcessorUtils.java | 57 +
.../columnbased/vectors/SampleInfo.java | 80 ++
.../trainers/columnbased/vectors/SplitInfo.java | 106 ++
.../columnbased/vectors/package-info.java | 22 +
.../org/apache/ignite/ml/util/MnistUtils.java | 121 ++
.../java/org/apache/ignite/ml/util/Utils.java | 53 +
.../org/apache/ignite/ml/IgniteMLTestSuite.java | 4 +-
.../java/org/apache/ignite/ml/TestUtils.java | 15 +
.../ml/clustering/ClusteringTestSuite.java | 3 +-
.../KMeansDistributedClustererTest.java | 197 ----
...KMeansDistributedClustererTestMultiNode.java | 146 +++
...MeansDistributedClustererTestSingleNode.java | 197 ++++
.../SparseDistributedBlockMatrixTest.java | 1 +
.../ml/math/impls/vector/CacheVectorTest.java | 10 +-
.../math/impls/vector/VectorToMatrixTest.java | 3 -
.../ignite/ml/trees/BaseDecisionTreeTest.java | 70 ++
.../ml/trees/ColumnDecisionTreeTrainerTest.java | 190 ++++
.../ignite/ml/trees/DecisionTreesTestSuite.java | 33 +
.../ml/trees/GiniSplitCalculatorTest.java | 141 +++
.../ignite/ml/trees/SplitDataGenerator.java | 390 +++++++
.../ml/trees/VarianceSplitCalculatorTest.java | 84 ++
.../ColumnDecisionTreeTrainerBenchmark.java | 455 ++++++++
.../trees/columntrees.manualrun.properties | 22 +
.../cpp/odbc-test/src/queries_test.cpp | 8 +
.../odbc/include/ignite/odbc/protocol_version.h | 9 +-
modules/platforms/cpp/odbc/src/message.cpp | 2 +-
.../platforms/cpp/odbc/src/protocol_version.cpp | 4 +-
.../ApiParity/CacheConfigurationParityTest.cs | 3 +-
.../Cache/Store/CacheStoreSessionTest.cs | 13 +-
.../components/list-editable-cols/index.js | 2 +-
.../components/pcbScaleNumber.js | 1 +
.../mixins/pcb-form-field-size.pug | 1 +
.../ui-ace-docker/ui-ace-docker.controller.js | 26 +-
.../ui-ace-java/ui-ace-java.controller.js | 150 +--
.../ui-ace-pojos/ui-ace-pojos.controller.js | 148 +--
.../ui-ace-pom/ui-ace-pom.controller.js | 26 +-
.../ui-ace-sharp/ui-ace-sharp.controller.js | 12 +-
.../ui-ace-spring/ui-ace-spring.controller.js | 146 +--
.../app/modules/dialog/dialog.controller.js | 26 +-
.../app/modules/form/field/down.directive.js | 16 +-
.../app/modules/form/field/up.directive.js | 14 +-
.../app/modules/form/group/add.directive.js | 24 +-
.../app/modules/form/group/tooltip.directive.js | 24 +-
.../app/modules/navbar/userbar.directive.js | 32 +-
modules/web-console/frontend/package.json | 31 +-
.../org/apache/ignite/yardstick/IgniteNode.java | 6 +-
184 files changed, 13752 insertions(+), 812 deletions(-)
----------------------------------------------------------------------
[24/28] ignite git commit: Fixed NPE on node stop when SSL is used.
Posted by sb...@apache.org.
Fixed NPE on node stop when SSL is used.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/132ec3fa
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/132ec3fa
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/132ec3fa
Branch: refs/heads/ignite-zk
Commit: 132ec3fa4f3e55f001899f4eef3cdf3786cefa41
Parents: 250ceb7
Author: amashenkov <an...@gmail.com>
Authored: Mon Nov 13 10:26:53 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Mon Nov 13 10:26:53 2017 +0300
----------------------------------------------------------------------
.../ignite/internal/util/nio/GridNioServer.java | 8 ++++--
.../IgniteCommunicationBalanceTest.java | 13 +++++++++
.../IgniteCommunicationSslBalanceTest.java | 28 ++++++++++++++++++++
.../ignite/testsuites/IgniteCacheTestSuite.java | 2 ++
4 files changed, 49 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/132ec3fa/modules/core/src/main/java/org/apache/ignite/internal/util/nio/GridNioServer.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/util/nio/GridNioServer.java b/modules/core/src/main/java/org/apache/ignite/internal/util/nio/GridNioServer.java
index 0dd7dd6..1d595d2 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/util/nio/GridNioServer.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/util/nio/GridNioServer.java
@@ -3371,8 +3371,12 @@ public class GridNioServer<T> {
GridSelectorNioSessionImpl ses0 = (GridSelectorNioSessionImpl)ses;
- if (!ses0.procWrite.get() && ses0.procWrite.compareAndSet(false, true))
- ses0.worker().registerWrite(ses0);
+ if (!ses0.procWrite.get() && ses0.procWrite.compareAndSet(false, true)) {
+ GridNioWorker worker = ses0.worker();
+
+ if (worker != null)
+ worker.registerWrite(ses0);
+ }
return null;
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/132ec3fa/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationBalanceTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationBalanceTest.java b/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationBalanceTest.java
index 37a307f..666bc1d 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationBalanceTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationBalanceTest.java
@@ -72,10 +72,20 @@ public class IgniteCommunicationBalanceTest extends GridCommonAbstractTest {
cfg.setClientMode(client);
+ if (sslEnabled())
+ cfg.setSslContextFactory(GridTestUtils.sslFactory());
+
return cfg;
}
/**
+ * @return {@code True} to enable SSL.
+ */
+ protected boolean sslEnabled() {
+ return false;
+ }
+
+ /**
* @return Value for {@link TcpCommunicationSpi#setUsePairedConnections(boolean)}.
*/
protected boolean usePairedConnections() {
@@ -100,6 +110,9 @@ public class IgniteCommunicationBalanceTest extends GridCommonAbstractTest {
* @throws Exception If failed.
*/
public void testBalance1() throws Exception {
+ if (sslEnabled())
+ return;
+
System.setProperty(IgniteSystemProperties.IGNITE_IO_BALANCE_PERIOD, "5000");
try {
http://git-wip-us.apache.org/repos/asf/ignite/blob/132ec3fa/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationSslBalanceTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationSslBalanceTest.java b/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationSslBalanceTest.java
new file mode 100644
index 0000000..68094e2
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/managers/communication/IgniteCommunicationSslBalanceTest.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.managers.communication;
+
+/**
+ *
+ */
+public class IgniteCommunicationSslBalanceTest extends IgniteCommunicationBalanceTest {
+ /** {@inheritDoc} */
+ @Override protected boolean sslEnabled() {
+ return true;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/132ec3fa/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
index 047550d..e3ebbc1 100755
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite.java
@@ -38,6 +38,7 @@ import org.apache.ignite.internal.managers.IgniteDiagnosticMessagesTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalanceMultipleConnectionsTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalancePairedConnectionsTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalanceTest;
+import org.apache.ignite.internal.managers.communication.IgniteCommunicationSslBalanceTest;
import org.apache.ignite.internal.managers.communication.IgniteIoTestMessagesTest;
import org.apache.ignite.internal.managers.communication.IgniteVariousConnectionNumberTest;
import org.apache.ignite.internal.processors.cache.CacheAffinityCallSelfTest;
@@ -309,6 +310,7 @@ public class IgniteCacheTestSuite extends TestSuite {
suite.addTestSuite(IgniteCommunicationBalanceTest.class);
suite.addTestSuite(IgniteCommunicationBalancePairedConnectionsTest.class);
suite.addTestSuite(IgniteCommunicationBalanceMultipleConnectionsTest.class);
+ suite.addTestSuite(IgniteCommunicationSslBalanceTest.class);
suite.addTestSuite(IgniteIoTestMessagesTest.class);
suite.addTestSuite(IgniteDiagnosticMessagesTest.class);
suite.addTestSuite(IgniteDiagnosticMessagesMultipleConnectionsTest.class);
[26/28] ignite git commit: IGNITE-6767 Reset non-valid near cache entry
Posted by sb...@apache.org.
IGNITE-6767 Reset non-valid near cache entry
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/85027e7f
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/85027e7f
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/85027e7f
Branch: refs/heads/ignite-zk
Commit: 85027e7fa348e9b210d6fa24add6f0b86d6d262f
Parents: 191295d
Author: Tim Onyschak <to...@trustwave.com>
Authored: Mon Nov 13 10:47:30 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Mon Nov 13 10:47:30 2017 +0300
----------------------------------------------------------------------
.../distributed/near/GridNearCacheEntry.java | 2 +-
.../near/GridCacheNearClientHitTest.java | 154 +++++++++++++++++++
.../testsuites/IgniteCacheTestSuite2.java | 3 +-
3 files changed, 157 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/85027e7f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheEntry.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheEntry.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheEntry.java
index ea52766..baf117b 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheEntry.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/near/GridNearCacheEntry.java
@@ -379,7 +379,7 @@ public class GridNearCacheEntry extends GridDistributedCacheEntry {
CacheObject old = this.val;
boolean hasVal = hasValueUnlocked();
- if (this.dhtVer == null || this.dhtVer.compareTo(dhtVer) < 0) {
+ if (this.dhtVer == null || this.dhtVer.compareTo(dhtVer) < 0 || !valid(topVer)) {
primaryNode(primaryNodeId, topVer);
update(val, expireTime, ttl, ver, true);
http://git-wip-us.apache.org/repos/asf/ignite/blob/85027e7f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearClientHitTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearClientHitTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearClientHitTest.java
new file mode 100644
index 0000000..1dd62e4
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearClientHitTest.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.cache.distributed.near;
+
+import java.util.UUID;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.configuration.NearCacheConfiguration;
+import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+import static org.apache.ignite.cache.CachePeekMode.NEAR;
+
+/**
+ *
+ */
+public class GridCacheNearClientHitTest extends GridCommonAbstractTest {
+ /** Ip finder. */
+ private final static TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
+
+ /** */
+ private final static String CACHE_NAME = "test-near-cache";
+
+ /** {@inheritDoc} */
+ @Override protected IgniteConfiguration getConfiguration(final String igniteInstanceName) throws Exception {
+ final IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
+
+ ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);
+
+ return cfg;
+ }
+
+ /**
+ * @param igniteInstanceName Node name.
+ * @return Configuration.
+ * @throws Exception If failed.
+ */
+ private IgniteConfiguration getClientConfiguration(final String igniteInstanceName) throws Exception {
+ final IgniteConfiguration cfg = getConfiguration(igniteInstanceName);
+
+ cfg.setClientMode(true);
+
+ return cfg;
+ }
+
+ /**
+ * @return Cache configuration.
+ */
+ private CacheConfiguration<Object, Object> cacheConfiguration() {
+ CacheConfiguration<Object, Object> cfg = new CacheConfiguration<>();
+
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ cfg.setCacheMode(CacheMode.PARTITIONED);
+
+ cfg.setBackups(1);
+
+ cfg.setCopyOnRead(false);
+
+ cfg.setName(CACHE_NAME);
+
+ return cfg;
+ }
+
+ /**
+ * @return Near cache configuration.
+ */
+ private NearCacheConfiguration<Object, Object> nearCacheConfiguration() {
+ NearCacheConfiguration<Object, Object> cfg = new NearCacheConfiguration<>();
+
+ cfg.setNearEvictionPolicy(new LruEvictionPolicy<>(25000));
+
+ return cfg;
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testLocalPeekAfterPrimaryNodeLeft() throws Exception {
+ try {
+ Ignite crd = startGrid("coordinator", getConfiguration("coordinator"));
+
+ Ignite client = startGrid("client", getClientConfiguration("client"));
+
+ Ignite srvNode = startGrid("server", getConfiguration("server"));
+
+ awaitPartitionMapExchange();
+
+ IgniteCache<Object, Object> cache = srvNode.getOrCreateCache(cacheConfiguration());
+
+ IgniteCache<Object, Object> nearCache = client.createNearCache(CACHE_NAME, nearCacheConfiguration());
+
+ UUID serverNodeId = srvNode.cluster().localNode().id();
+
+ int remoteKey = 0;
+ for (; ; remoteKey++) {
+ if (crd.affinity(CACHE_NAME).mapKeyToNode(remoteKey).id().equals(serverNodeId))
+ break;
+ }
+
+ cache.put(remoteKey, remoteKey);
+
+ Object value = nearCache.localPeek(remoteKey, NEAR);
+
+ assertNull("The value should not be loaded from a remote node.", value);
+
+ nearCache.get(remoteKey);
+
+ value = nearCache.localPeek(remoteKey, NEAR);
+
+ assertNotNull("The returned value should not be null.", value);
+
+ srvNode.close();
+
+ awaitPartitionMapExchange();
+
+ value = nearCache.localPeek(remoteKey, NEAR);
+
+ assertNull("The value should not be loaded from a remote node.", value);
+
+ value = nearCache.get(remoteKey);
+
+ assertNotNull("The value should be loaded from a remote node.", value);
+
+ value = nearCache.localPeek(remoteKey, NEAR);
+
+ assertNotNull("The returned value should not be null.", value);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/85027e7f/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite2.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite2.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite2.java
index 6f5b710..5ce213e 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite2.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite2.java
@@ -87,6 +87,7 @@ import org.apache.ignite.internal.processors.cache.distributed.dht.IgniteCachePa
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearEvictionEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearReadersSelfTest;
+import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearClientHitTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearEvictionEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearJobExecutionSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearMultiGetSelfTest;
@@ -242,7 +243,7 @@ public class IgniteCacheTestSuite2 extends TestSuite {
suite.addTest(new TestSuite(GridCacheNearPartitionedClearSelfTest.class));
suite.addTest(new TestSuite(GridCacheOffheapUpdateSelfTest.class));
-
+ suite.addTest(new TestSuite(GridCacheNearClientHitTest.class));
suite.addTest(new TestSuite(GridCacheNearPrimarySyncSelfTest.class));
suite.addTest(new TestSuite(GridCacheColocatedPrimarySyncSelfTest.class));
[12/28] ignite git commit: Updated javadocs for some of the internal
cache classes - Fixes #3011.
Posted by sb...@apache.org.
Updated javadocs for some of the internal cache classes - Fixes #3011.
Signed-off-by: Alexey Goncharuk <al...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/291b166f
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/291b166f
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/291b166f
Branch: refs/heads/ignite-zk
Commit: 291b166f29b177f67cf810ebf9567221566ab5db
Parents: 20ec6c9
Author: dpavlov <dp...@gridgain.com>
Authored: Fri Nov 10 11:09:38 2017 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Fri Nov 10 11:09:38 2017 +0300
----------------------------------------------------------------------
.../internal/processors/cache/CacheGroupContext.java | 2 +-
.../processors/cache/DynamicCacheChangeRequest.java | 11 +++++++----
.../internal/processors/cache/GridCacheProcessor.java | 7 ++++++-
.../processors/cache/IgniteCacheOffheapManager.java | 2 +-
.../cache/persistence/snapshot/SnapshotOperation.java | 7 ++++---
5 files changed, 19 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/291b166f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheGroupContext.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheGroupContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheGroupContext.java
index d9523e3..ad4bbe3 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheGroupContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheGroupContext.java
@@ -673,7 +673,7 @@ public class CacheGroupContext {
/**
* @param cctx Cache context.
- * @param destroy Destroy flag.
+ * @param destroy Destroy data flag. Setting to <code>true</code> will remove all cache data.
*/
void stopCache(GridCacheContext cctx, boolean destroy) {
if (top != null)
http://git-wip-us.apache.org/repos/asf/ignite/blob/291b166f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/DynamicCacheChangeRequest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/DynamicCacheChangeRequest.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/DynamicCacheChangeRequest.java
index 3ee5903..2b942b0 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/DynamicCacheChangeRequest.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/DynamicCacheChangeRequest.java
@@ -71,7 +71,7 @@ public class DynamicCacheChangeRequest implements Serializable {
/** Cache active on start or not*/
private boolean disabledAfterStart;
- /** Destroy. */
+ /** Cache data destroy flag. Setting to <code>true</code> will cause removing all cache data.*/
private boolean destroy;
/** Whether cache was created through SQL. */
@@ -144,7 +144,7 @@ public class DynamicCacheChangeRequest implements Serializable {
* @param ctx Context.
* @param cacheName Cache name.
* @param sql {@code true} if the cache must be stopped only if it was created by SQL command {@code CREATE TABLE}.
- * @param destroy Destroy flag.
+ * @param destroy Cache data destroy flag. Setting to <code>true</code> will cause removing all cache data.
* @return Cache stop request.
*/
public static DynamicCacheChangeRequest stopRequest(
@@ -226,14 +226,15 @@ public class DynamicCacheChangeRequest implements Serializable {
}
/**
- * @return Destroy flag.
+ * @return Cache data destroy flag. Setting to <code>true</code> will remove all cache data.
*/
public boolean destroy(){
return destroy;
}
/**
- * @param destroy Destroy.
+ * Sets cache data destroy flag. Setting to <code>true</code> will cause removing all cache data.
+ * @param destroy Destroy flag.
*/
public void destroy(boolean destroy) {
this.destroy = destroy;
@@ -359,6 +360,8 @@ public class DynamicCacheChangeRequest implements Serializable {
}
/**
+ * Sets if cache is created using create table.
+ *
* @param sql New SQL flag.
*/
public void sql(boolean sql) {
http://git-wip-us.apache.org/repos/asf/ignite/blob/291b166f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
index 01d0d41..dc599c7 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
@@ -1133,6 +1133,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
/**
* @param cache Cache to stop.
* @param cancel Cancel flag.
+ * @param destroy Destroy data flag. Setting to <code>true</code> will remove all cache data.
*/
@SuppressWarnings({"TypeMayBeWeakened", "unchecked"})
private void stopCache(GridCacheAdapter<?, ?> cache, boolean cancel, boolean destroy) {
@@ -1963,7 +1964,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
/**
* @param cacheName Cache name.
- * @param destroy Cache destroy flag.
+ * @param destroy Cache data destroy flag. Setting to <code>true</code> will remove all cache data.
* @return Stopped cache context.
*/
private GridCacheContext<?, ?> prepareCacheStop(String cacheName, boolean destroy) {
@@ -2697,6 +2698,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
* @param sql If the cache needs to be destroyed only if it was created as the result of SQL {@code CREATE TABLE}
* command.
* @param checkThreadTx If {@code true} checks that current thread does not have active transactions.
+ * @param restart Restart flag.
* @return Future that will be completed when cache is destroyed.
*/
public IgniteInternalFuture<Boolean> dynamicDestroyCache(String cacheName, boolean sql, boolean checkThreadTx,
@@ -2718,6 +2720,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
/**
* @param cacheNames Collection of cache names to destroy.
* @param checkThreadTx If {@code true} checks that current thread does not have active transactions.
+ * @param restart Restart flag.
* @return Future that will be completed when cache is destroyed.
*/
public IgniteInternalFuture<?> dynamicDestroyCaches(Collection<String> cacheNames, boolean checkThreadTx,
@@ -2728,6 +2731,8 @@ public class GridCacheProcessor extends GridProcessorAdapter {
/**
* @param cacheNames Collection of cache names to destroy.
* @param checkThreadTx If {@code true} checks that current thread does not have active transactions.
+ * @param restart Restart flag.
+ * @param destroy Cache data destroy flag. Setting to <code>true</code> will cause removing all cache data
* @return Future that will be completed when cache is destroyed.
*/
public IgniteInternalFuture<?> dynamicDestroyCaches(Collection<String> cacheNames, boolean checkThreadTx,
http://git-wip-us.apache.org/repos/asf/ignite/blob/291b166f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheOffheapManager.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheOffheapManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheOffheapManager.java
index 4531802..761b787 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheOffheapManager.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteCacheOffheapManager.java
@@ -60,7 +60,7 @@ public interface IgniteCacheOffheapManager {
/**
* @param cacheId Cache ID.
- * @param destroy Destroy data flag.
+ * @param destroy Destroy data flag. Setting to <code>true</code> will remove all cache data.
*/
public void stopCache(int cacheId, boolean destroy);
http://git-wip-us.apache.org/repos/asf/ignite/blob/291b166f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotOperation.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotOperation.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotOperation.java
index dfdf832..51c8c32 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotOperation.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotOperation.java
@@ -28,17 +28,18 @@ public interface SnapshotOperation extends Serializable {
/**
* Cache group ids included to this snapshot.
*
- * @return Cache names.
+ * @return Cache group identifiers.
*/
public Set<Integer> cacheGroupIds();
/**
- * Cache names included to this snapshot.
+ * @return Cache names included to this snapshot.
*/
public Set<String> cacheNames();
/**
- * Any custom extra parameter.
+ * @return Any custom extra parameter.
+ * In case Map object is provided, contains named snapshot operation attributes.
*/
public Object extraParameter();
}
[18/28] ignite git commit: IGNITE-5218: First version of decision
trees. This closes #2936
Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/FeaturesCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/FeaturesCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/FeaturesCache.java
new file mode 100644
index 0000000..fcc1f16
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/FeaturesCache.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.caches;
+
+import java.util.Set;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.cache.affinity.AffinityKeyMapped;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+
+/**
+ * Cache storing features for {@link ColumnDecisionTreeTrainer}.
+ */
+public class FeaturesCache {
+ /**
+ * Name of cache which is used for storing features for {@link ColumnDecisionTreeTrainer}.
+ */
+ public static final String COLUMN_DECISION_TREE_TRAINER_FEATURES_CACHE_NAME = "COLUMN_DECISION_TREE_TRAINER_FEATURES_CACHE_NAME";
+
+ /**
+ * Key of features cache.
+ */
+ public static class FeatureKey {
+ /** Column key of cache used as input for {@link ColumnDecisionTreeTrainer}. */
+ @AffinityKeyMapped
+ private Object parentColKey;
+
+ /** Index of feature. */
+ private final int featureIdx;
+
+ /** UUID of training. */
+ private final UUID trainingUUID;
+
+ /**
+ * Construct FeatureKey.
+ *
+ * @param featureIdx Feature index.
+ * @param trainingUUID UUID of training.
+ * @param parentColKey Column key of cache used as input.
+ */
+ public FeatureKey(int featureIdx, UUID trainingUUID, Object parentColKey) {
+ this.parentColKey = parentColKey;
+ this.featureIdx = featureIdx;
+ this.trainingUUID = trainingUUID;
+ this.parentColKey = parentColKey;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ FeatureKey key = (FeatureKey)o;
+
+ if (featureIdx != key.featureIdx)
+ return false;
+ return trainingUUID != null ? trainingUUID.equals(key.trainingUUID) : key.trainingUUID == null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ int res = trainingUUID != null ? trainingUUID.hashCode() : 0;
+ res = 31 * res + featureIdx;
+ return res;
+ }
+ }
+
+ /**
+ * Create new projections cache for ColumnDecisionTreeTrainer if needed.
+ *
+ * @param ignite Ignite instance.
+ */
+ public static IgniteCache<FeatureKey, double[]> getOrCreate(Ignite ignite) {
+ CacheConfiguration<FeatureKey, double[]> cfg = new CacheConfiguration<>();
+
+ // Write to primary.
+ cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+ // Atomic transactions only.
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ // No eviction.
+ cfg.setEvictionPolicy(null);
+
+ // No copying of values.
+ cfg.setCopyOnRead(false);
+
+ // Cache is partitioned.
+ cfg.setCacheMode(CacheMode.PARTITIONED);
+
+ cfg.setOnheapCacheEnabled(true);
+
+ cfg.setBackups(0);
+
+ cfg.setName(COLUMN_DECISION_TREE_TRAINER_FEATURES_CACHE_NAME);
+
+ return ignite.getOrCreateCache(cfg);
+ }
+
+ /**
+ * Construct FeatureKey from index, uuid and affinity key.
+ *
+ * @param idx Feature index.
+ * @param uuid UUID of training.
+ * @param aff Affinity key.
+ * @return FeatureKey.
+ */
+ public static FeatureKey getFeatureCacheKey(int idx, UUID uuid, Object aff) {
+ return new FeatureKey(idx, uuid, aff);
+ }
+
+ /**
+ * Clear all data from features cache related to given training.
+ *
+ * @param featuresCnt Count of features.
+ * @param affinity Affinity function.
+ * @param uuid Training uuid.
+ * @param ignite Ignite instance.
+ */
+ public static void clear(int featuresCnt, IgniteBiFunction<Integer, Ignite, Object> affinity, UUID uuid,
+ Ignite ignite) {
+ Set<FeatureKey> toRmv = IntStream.range(0, featuresCnt).boxed().map(fIdx -> getFeatureCacheKey(fIdx, uuid, affinity.apply(fIdx, ignite))).collect(Collectors.toSet());
+
+ getOrCreate(ignite).removeAll(toRmv);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ProjectionsCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ProjectionsCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ProjectionsCache.java
new file mode 100644
index 0000000..29cf6b4
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ProjectionsCache.java
@@ -0,0 +1,284 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.caches;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PrimitiveIterator;
+import java.util.Set;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.cache.affinity.AffinityKeyMapped;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+import org.apache.ignite.ml.trees.trainers.columnbased.RegionProjection;
+
+/**
+ * Cache used for storing data of region projections on features.
+ */
+public class ProjectionsCache {
+ /**
+ * Name of cache which is used for storing data of region projections on features of {@link
+ * ColumnDecisionTreeTrainer}.
+ */
+ public static final String CACHE_NAME = "COLUMN_DECISION_TREE_TRAINER_PROJECTIONS_CACHE_NAME";
+
+ /**
+ * Key of region projections cache.
+ */
+ public static class RegionKey {
+ /** Column key of cache used as input for {@link ColumnDecisionTreeTrainer}. */
+ @AffinityKeyMapped
+ private final Object parentColKey;
+
+ /** Feature index. */
+ private final int featureIdx;
+
+ /** Region block index. */
+ private final int regBlockIdx;
+
+ /** Training UUID. */
+ private final UUID trainingUUID;
+
+ /**
+ * Construct a RegionKey from feature index, index of block, key of column in input cache and UUID of training.
+ *
+ * @param featureIdx Feature index.
+ * @param regBlockIdx Index of block.
+ * @param parentColKey Key of column in input cache.
+ * @param trainingUUID UUID of training.
+ */
+ public RegionKey(int featureIdx, int regBlockIdx, Object parentColKey, UUID trainingUUID) {
+ this.featureIdx = featureIdx;
+ this.regBlockIdx = regBlockIdx;
+ this.trainingUUID = trainingUUID;
+ this.parentColKey = parentColKey;
+ }
+
+ /**
+ * Feature index.
+ *
+ * @return Feature index.
+ */
+ public int featureIdx() {
+ return featureIdx;
+ }
+
+ /**
+ * Region block index.
+ *
+ * @return Region block index.
+ */
+ public int regionBlockIndex() {
+ return regBlockIdx;
+ }
+
+ /**
+ * UUID of training.
+ *
+ * @return UUID of training.
+ */
+ public UUID trainingUUID() {
+ return trainingUUID;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ RegionKey key = (RegionKey)o;
+
+ // NOTE(review): parentColKey is excluded from equals/hashCode — presumably because it is
+ // derived from featureIdx via the affinity function and therefore redundant; confirm.
+ if (featureIdx != key.featureIdx)
+ return false;
+ if (regBlockIdx != key.regBlockIdx)
+ return false;
+ return trainingUUID != null ? trainingUUID.equals(key.trainingUUID) : key.trainingUUID == null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ // Consistent with equals: parentColKey is not part of the hash.
+ int res = trainingUUID != null ? trainingUUID.hashCode() : 0;
+ res = 31 * res + featureIdx;
+ res = 31 * res + regBlockIdx;
+ return res;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "RegionKey [" +
+ "parentColKey=" + parentColKey +
+ ", featureIdx=" + featureIdx +
+ ", regBlockIdx=" + regBlockIdx +
+ ", trainingUUID=" + trainingUUID +
+ ']';
+ }
+ }
+
+ /**
+ * Affinity service for region projections cache.
+ *
+ * @return Affinity service for region projections cache.
+ */
+ public static Affinity<RegionKey> affinity() {
+ return Ignition.localIgnite().affinity(CACHE_NAME);
+ }
+
+ /**
+ * Get or create region projections cache.
+ *
+ * @param ignite Ignite instance.
+ * @return Region projections cache.
+ */
+ public static IgniteCache<RegionKey, List<RegionProjection>> getOrCreate(Ignite ignite) {
+ CacheConfiguration<RegionKey, List<RegionProjection>> cfg = new CacheConfiguration<>();
+
+ // Write to primary.
+ cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+ // Atomic transactions only.
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ // No eviction.
+ cfg.setEvictionPolicy(null);
+
+ // No copying of values.
+ cfg.setCopyOnRead(false);
+
+ // Cache is partitioned.
+ cfg.setCacheMode(CacheMode.PARTITIONED);
+
+ // No backups: training data is recomputable, so we trade redundancy for memory.
+ cfg.setBackups(0);
+
+ cfg.setOnheapCacheEnabled(true);
+
+ cfg.setName(CACHE_NAME);
+
+ return ignite.getOrCreateCache(cfg);
+ }
+
+ /**
+ * Get region projections in the form of map (regionIndex -> regionProjections).
+ *
+ * @param featureIdx Feature index.
+ * @param maxDepth Max depth of decision tree.
+ * @param regionIndexes Indexes of regions for which we want get projections.
+ * @param blockSize Size of regions block.
+ * @param affinity Affinity function.
+ * @param trainingUUID UUID of training.
+ * @param ignite Ignite instance.
+ * @return Region projections in the form of map (regionIndex -> regionProjections).
+ */
+ public static Map<Integer, RegionProjection> projectionsOfRegions(int featureIdx, int maxDepth,
+ IntStream regionIndexes, int blockSize, IgniteFunction<Integer, Object> affinity, UUID trainingUUID,
+ Ignite ignite) {
+ HashMap<Integer, RegionProjection> regsForSearch = new HashMap<>();
+ IgniteCache<RegionKey, List<RegionProjection>> cache = getOrCreate(ignite);
+
+ PrimitiveIterator.OfInt itr = regionIndexes.iterator();
+
+ // Cache the most recently loaded block: consecutive region indexes usually hit the same block.
+ int curBlockIdx = -1;
+ List<RegionProjection> block = null;
+
+ Object affinityKey = affinity.apply(featureIdx);
+
+ while (itr.hasNext()) {
+ int i = itr.nextInt();
+
+ // Regions are stored in blocks of blockSize entries each.
+ int blockIdx = i / blockSize;
+
+ if (blockIdx != curBlockIdx) {
+ // NOTE(review): localPeek returns null when the entry is not on this node — this assumes
+ // the block is co-located here via the affinity key; confirm callers run node-local.
+ block = cache.localPeek(key(featureIdx, blockIdx, affinityKey, trainingUUID));
+ curBlockIdx = blockIdx;
+ }
+
+ if (block == null)
+ throw new IllegalStateException("Unexpected null block at index " + i);
+
+ RegionProjection reg = block.get(i % blockSize);
+
+ // Regions at maximal depth cannot be split further, so they are excluded from the result.
+ if (reg.depth() < maxDepth)
+ regsForSearch.put(i, reg);
+ }
+
+ return regsForSearch;
+ }
+
+ /**
+ * Returns projections of regions on given feature filtered by maximal depth in the form of (region index -> region projection).
+ *
+ * @param featureIdx Feature index.
+ * @param maxDepth Maximal depth of the tree.
+ * @param regsCnt Count of regions.
+ * @param blockSize Size of regions blocks.
+ * @param affinity Affinity function.
+ * @param trainingUUID UUID of training.
+ * @param ignite Ignite instance.
+ * @return Projections of regions on given feature filtered by maximal depth in the form of (region index -> region projection).
+ */
+ public static Map<Integer, RegionProjection> projectionsOfFeature(int featureIdx, int maxDepth, int regsCnt,
+ int blockSize, IgniteFunction<Integer, Object> affinity, UUID trainingUUID, Ignite ignite) {
+ return projectionsOfRegions(featureIdx, maxDepth, IntStream.range(0, regsCnt), blockSize, affinity, trainingUUID, ignite);
+ }
+
+ /**
+ * Construct key for projections cache.
+ *
+ * @param featureIdx Feature index.
+ * @param regBlockIdx Region block index.
+ * @param parentColKey Column key of cache used as input for {@link ColumnDecisionTreeTrainer}.
+ * @param uuid UUID of training.
+ * @return Key for projections cache.
+ */
+ public static RegionKey key(int featureIdx, int regBlockIdx, Object parentColKey, UUID uuid) {
+ return new RegionKey(featureIdx, regBlockIdx, parentColKey, uuid);
+ }
+
+ /**
+ * Clear data from projections cache related to given training.
+ *
+ * @param featuresCnt Features count.
+ * @param regs Regions count.
+ * @param aff Affinity function.
+ * @param uuid UUID of training.
+ * @param ignite Ignite instance.
+ */
+ public static void clear(int featuresCnt, int regs, IgniteBiFunction<Integer, Ignite, Object> aff, UUID uuid,
+ Ignite ignite) {
+ // Cartesian product of all feature indexes and all region block indexes -> full key set, removed in bulk.
+ Set<RegionKey> toRmv = IntStream.range(0, featuresCnt).boxed().
+ flatMap(fIdx -> IntStream.range(0, regs).boxed().map(reg -> new IgniteBiTuple<>(fIdx, reg))).
+ map(t -> key(t.get1(), t.get2(), aff.apply(t.get1(), ignite), uuid)).
+ collect(Collectors.toSet());
+
+ getOrCreate(ignite).removeAll(toRmv);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/SplitCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/SplitCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/SplitCache.java
new file mode 100644
index 0000000..ecbc861
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/SplitCache.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.caches;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Set;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import javax.cache.Cache;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.cache.affinity.AffinityKeyMapped;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.internal.processors.cache.CacheEntryImpl;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+
+/**
+ * Class for working with cache used for storing of best splits during training with {@link ColumnDecisionTreeTrainer}.
+ */
+public class SplitCache {
+ /** Name of splits cache. */
+ public static final String CACHE_NAME = "COLUMN_DECISION_TREE_TRAINER_SPLIT_CACHE_NAME";
+
+ /**
+ * Class used for keys in the splits cache.
+ */
+ public static class SplitKey {
+ /** UUID of current training. */
+ private final UUID trainingUUID;
+
+ /** Affinity key of input data. */
+ @AffinityKeyMapped
+ private final Object parentColKey;
+
+ /** Index of feature by which the split is made. */
+ private final int featureIdx;
+
+ /**
+ * Construct SplitKey.
+ *
+ * @param trainingUUID UUID of the training.
+ * @param parentColKey Affinity key used to ensure that cache entry for given feature will be on the same node
+ * as column with that feature in input.
+ * @param featureIdx Feature index.
+ */
+ public SplitKey(UUID trainingUUID, Object parentColKey, int featureIdx) {
+ this.trainingUUID = trainingUUID;
+ this.featureIdx = featureIdx;
+ this.parentColKey = parentColKey;
+ }
+
+ /** Get UUID of current training. */
+ public UUID trainingUUID() {
+ return trainingUUID;
+ }
+
+ /**
+ * Get feature index.
+ *
+ * @return Feature index.
+ */
+ public int featureIdx() {
+ return featureIdx;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ SplitKey splitKey = (SplitKey)o;
+
+ // NOTE(review): parentColKey is excluded from equals/hashCode — presumably because it is
+ // derived from featureIdx via the affinity function and therefore redundant; confirm.
+ if (featureIdx != splitKey.featureIdx)
+ return false;
+ return trainingUUID != null ? trainingUUID.equals(splitKey.trainingUUID) : splitKey.trainingUUID == null;
+
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ // Consistent with equals: parentColKey is not part of the hash.
+ int res = trainingUUID != null ? trainingUUID.hashCode() : 0;
+ res = 31 * res + featureIdx;
+ return res;
+ }
+ }
+
+ /**
+ * Construct the key for splits cache.
+ *
+ * @param featureIdx Feature index.
+ * @param parentColKey Affinity key used to ensure that cache entry for given feature will be on the same node as
+ * column with that feature in input.
+ * @param uuid UUID of current training.
+ * @return Key for splits cache.
+ */
+ public static SplitKey key(int featureIdx, Object parentColKey, UUID uuid) {
+ return new SplitKey(uuid, parentColKey, featureIdx);
+ }
+
+ /**
+ * Get or create splits cache.
+ *
+ * @param ignite Ignite instance.
+ * @return Splits cache.
+ */
+ public static IgniteCache<SplitKey, IgniteBiTuple<Integer, Double>> getOrCreate(Ignite ignite) {
+ CacheConfiguration<SplitKey, IgniteBiTuple<Integer, Double>> cfg = new CacheConfiguration<>();
+
+ // Write to primary.
+ cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+ // Atomic transactions only.
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ // No eviction.
+ cfg.setEvictionPolicy(null);
+
+ // No copying of values.
+ cfg.setCopyOnRead(false);
+
+ // Cache is partitioned.
+ cfg.setCacheMode(CacheMode.PARTITIONED);
+
+ // No backups: split data is recomputable, so we trade redundancy for memory.
+ cfg.setBackups(0);
+
+ cfg.setOnheapCacheEnabled(true);
+
+ cfg.setName(CACHE_NAME);
+
+ return ignite.getOrCreateCache(cfg);
+ }
+
+ /**
+ * Affinity function used in splits cache.
+ *
+ * @return Affinity function used in splits cache.
+ */
+ public static Affinity<SplitKey> affinity() {
+ return Ignition.localIgnite().affinity(CACHE_NAME);
+ }
+
+ /**
+ * Returns local entries for keys corresponding to {@code featureIndexes}.
+ *
+ * @param featureIndexes Index of features.
+ * @param affinity Affinity function.
+ * @param trainingUUID UUID of training.
+ * @return local entries for keys corresponding to {@code featureIndexes}.
+ */
+ public static Iterable<Cache.Entry<SplitKey, IgniteBiTuple<Integer, Double>>> localEntries(
+ Set<Integer> featureIndexes,
+ IgniteBiFunction<Integer, Ignite, Object> affinity,
+ UUID trainingUUID) {
+ Ignite ignite = Ignition.localIgnite();
+ Set<SplitKey> keys = featureIndexes.stream().map(fIdx -> new SplitKey(trainingUUID, affinity.apply(fIdx, ignite), fIdx)).collect(Collectors.toSet());
+
+ // Keep only the keys whose primary copy resides on this node.
+ Collection<SplitKey> locKeys = affinity().mapKeysToNodes(keys).getOrDefault(ignite.cluster().localNode(), Collections.emptyList());
+
+ // Lazy Iterable: entries are peeked only when the caller iterates.
+ // NOTE(review): localPeek may return null, producing entries with null values — confirm
+ // consumers tolerate that (or that every local key is guaranteed to be present).
+ return () -> {
+ Function<SplitKey, Cache.Entry<SplitKey, IgniteBiTuple<Integer, Double>>> f = k -> (new CacheEntryImpl<>(k, getOrCreate(ignite).localPeek(k)));
+ return locKeys.stream().map(f).iterator();
+ };
+ }
+
+ /**
+ * Clears data related to current training from splits cache related to given training.
+ *
+ * @param featuresCnt Count of features.
+ * @param affinity Affinity function.
+ * @param uuid UUID of the given training.
+ * @param ignite Ignite instance.
+ */
+ public static void clear(int featuresCnt, IgniteBiFunction<Integer, Ignite, Object> affinity, UUID uuid,
+ Ignite ignite) {
+ // Build the full key set for this training (one key per feature), then remove in one bulk call.
+ Set<SplitKey> toRmv = IntStream.range(0, featuresCnt).boxed().map(fIdx -> new SplitKey(uuid, affinity.apply(fIdx, ignite), fIdx)).collect(Collectors.toSet());
+
+ getOrCreate(ignite).removeAll(toRmv);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/ContinuousSplitCalculators.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/ContinuousSplitCalculators.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/ContinuousSplitCalculators.java
new file mode 100644
index 0000000..9fd4c66
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/ContinuousSplitCalculators.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs;
+
+import org.apache.ignite.Ignite;
+import org.apache.ignite.ml.math.functions.IgniteCurriedBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainerInput;
+
+/** Continuous Split Calculators. */
+public class ContinuousSplitCalculators {
+ /** Variance split calculator factory: stateless, so the input is ignored. */
+ public static IgniteFunction<ColumnDecisionTreeTrainerInput, VarianceSplitCalculator> VARIANCE = input ->
+ new VarianceSplitCalculator();
+
+ /** Gini split calculator factory: curried so the Ignite instance is bound before labels are loaded from input. */
+ // NOTE(review): both factory fields could be declared final (and the class given a private
+ // constructor, as it is a pure constants holder) — confirm and tighten.
+ public static IgniteCurriedBiFunction<Ignite, ColumnDecisionTreeTrainerInput, GiniSplitCalculator> GINI = ignite ->
+ input -> new GiniSplitCalculator(input.labels(ignite));
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/GiniSplitCalculator.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/GiniSplitCalculator.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/GiniSplitCalculator.java
new file mode 100644
index 0000000..259c84c
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/GiniSplitCalculator.java
@@ -0,0 +1,234 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs;
+
+import it.unimi.dsi.fastutil.doubles.Double2IntArrayMap;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.PrimitiveIterator;
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.ContinuousSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.ContinuousSplitInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+
+/**
+ * Calculator for Gini impurity.
+ */
+public class GiniSplitCalculator implements ContinuousSplitCalculator<GiniSplitCalculator.GiniData> {
+ /** Mapping assigning a dense index to each distinct label value (in order of first appearance). */
+ private final Map<Double, Integer> mapping = new Double2IntArrayMap();
+
+ /**
+ * Create Gini split calculator from labels.
+ *
+ * @param labels Labels.
+ */
+ public GiniSplitCalculator(double[] labels) {
+ int i = 0;
+
+ for (double label : labels) {
+ if (!mapping.containsKey(label)) {
+ mapping.put(label, i);
+ i++;
+ }
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public GiniData calculateRegionInfo(DoubleStream s, int l) {
+ PrimitiveIterator.OfDouble itr = s.iterator();
+
+ Map<Double, Integer> m = new HashMap<>();
+
+ int size = 0;
+
+ // Count occurrences of each label in the stream.
+ while (itr.hasNext()) {
+ size++;
+ m.compute(itr.next(), (a, i) -> i != null ? i + 1 : 1);
+ }
+
+ // Sum of squared per-label counts; Gini impurity = 1 - c2 / size^2.
+ double c2 = m.values().stream().mapToDouble(v -> v * v).sum();
+
+ // Re-index the counts by the dense label mapping built in the constructor.
+ int[] cnts = new int[mapping.size()];
+
+ m.forEach((key, value) -> cnts[mapping.get(key)] = value);
+
+ return new GiniData(size != 0 ? 1 - c2 / (size * size) : 0.0, size, cnts, c2);
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitInfo<GiniData> splitRegion(Integer[] s, double[] values, double[] labels, int regionIdx,
+ GiniData d) {
+ int size = d.getSize();
+
+ double lg = 0.0;
+ double rg = d.impurity();
+
+ double lc2 = 0.0;
+ double rc2 = d.c2;
+ int lSize = 0;
+
+ // Impurities are compared in size-weighted form, hence the multiplication by size.
+ double minImpurity = d.impurity() * size;
+ double curThreshold;
+ double curImpurity;
+ double threshold = Double.NEGATIVE_INFINITY;
+
+ int i = 0;
+ int nextIdx = s[0];
+ i++;
+ // lrImps layout: [0] = left impurity, [1] = right impurity, [2] = left c2, [3] = right c2.
+ double[] lrImps = new double[] {0.0, d.impurity(), lc2, rc2};
+
+ // Working per-label counts, updated on every candidate move.
+ int[] lMapCur = new int[d.counts().length];
+ int[] rMapCur = new int[d.counts().length];
+
+ System.arraycopy(d.counts(), 0, rMapCur, 0, d.counts().length);
+
+ // Best-so-far per-label counts, snapshotted only when a better split is found.
+ int[] lMap = new int[d.counts().length];
+ int[] rMap = new int[d.counts().length];
+
+ System.arraycopy(d.counts(), 0, rMap, 0, d.counts().length);
+
+ do {
+ // Process all values equal to prev: a split is only evaluated at a value change,
+ // since splitting between equal feature values is impossible.
+ while (i < s.length) {
+ moveLeft(labels[nextIdx], i, size - i, lMapCur, rMapCur, lrImps);
+ curImpurity = (i * lrImps[0] + (size - i) * lrImps[1]);
+ curThreshold = values[nextIdx];
+
+ if (values[nextIdx] != values[(nextIdx = s[i++])]) {
+ if (curImpurity < minImpurity) {
+ lSize = i - 1;
+
+ lg = lrImps[0];
+ rg = lrImps[1];
+
+ lc2 = lrImps[2];
+ rc2 = lrImps[3];
+
+ System.arraycopy(lMapCur, 0, lMap, 0, lMapCur.length);
+ System.arraycopy(rMapCur, 0, rMap, 0, rMapCur.length);
+
+ minImpurity = curImpurity;
+ threshold = curThreshold;
+ }
+
+ break;
+ }
+ }
+ }
+ while (i < s.length - 1);
+
+ // A degenerate split (everything on one side) is no split at all.
+ if (lSize == size || lSize == 0)
+ return null;
+
+ GiniData lData = new GiniData(lg, lSize, lMap, lc2);
+ int rSize = size - lSize;
+ GiniData rData = new GiniData(rg, rSize, rMap, rc2);
+
+ return new ContinuousSplitInfo<>(regionIdx, threshold, lData, rData);
+ }
+
+ /**
+ * Add point to the left interval and remove it from the right interval and calculate necessary statistics on
+ * intervals with new bounds.
+ */
+ private void moveLeft(double x, int lSize, int rSize, int[] lMap, int[] rMap, double[] data) {
+ double lc2 = data[2];
+ double rc2 = data[3];
+
+ Integer idx = mapping.get(x);
+
+ int cxl = lMap[idx];
+ int cxr = rMap[idx];
+
+ // (c + 1)^2 = c^2 + 2c + 1, so adding one occurrence raises the squared count by 2c + 1;
+ // (c - 1)^2 = c^2 - 2c + 1, so removing one lowers it by 2c - 1.
+ lc2 += 2 * cxl + 1;
+ rc2 -= 2 * cxr - 1;
+
+ lMap[idx] += 1;
+ rMap[idx] -= 1;
+
+ data[0] = 1 - lc2 / (lSize * lSize);
+ data[1] = 1 - rc2 / (rSize * rSize);
+
+ data[2] = lc2;
+ data[3] = rc2;
+ }
+
+ /**
+ * Data used for gini impurity calculations.
+ */
+ public static class GiniData extends ContinuousRegionInfo {
+ /** Sum of squares of counts of each label. */
+ private double c2;
+
+ /** Counts of each label. On i-th position there is count of label which is mapped to index i. */
+ private int[] m;
+
+ /**
+ * Create Gini data.
+ *
+ * @param impurity Impurity (i.e. Gini impurity).
+ * @param size Count of samples.
+ * @param m Counts of each label.
+ * @param c2 Sum of squares of counts of each label.
+ */
+ public GiniData(double impurity, int size, int[] m, double c2) {
+ super(impurity, size);
+ this.m = m;
+ this.c2 = c2;
+ }
+
+ /**
+ * No-op constructor for serialization/deserialization.
+ * NOTE(review): leaves {@code m} null until readExternal is invoked — confirm no other code path
+ * touches counts() on such an instance.
+ */
+ public GiniData() {
+ // No-op.
+ }
+
+ /** Get counts of each label. */
+ public int[] counts() {
+ return m;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ super.writeExternal(out);
+ out.writeDouble(c2);
+ // Write the array length first so readExternal knows how many ints follow.
+ out.writeInt(m.length);
+ for (int i : m)
+ out.writeInt(i);
+
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ super.readExternal(in);
+
+ // Must mirror writeExternal exactly: c2, length, then the counts.
+ c2 = in.readDouble();
+ int size = in.readInt();
+ m = new int[size];
+
+ for (int i = 0; i < size; i++)
+ m[i] = in.readInt();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/VarianceSplitCalculator.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/VarianceSplitCalculator.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/VarianceSplitCalculator.java
new file mode 100644
index 0000000..66c54f2
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/VarianceSplitCalculator.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs;
+
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.PrimitiveIterator;
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.ContinuousSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.ContinuousSplitInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+
+/**
+ * Calculator of variance in a given region.
+ */
+public class VarianceSplitCalculator implements ContinuousSplitCalculator<VarianceSplitCalculator.VarianceData> {
+ /**
+ * Data used in variance calculations.
+ */
+ public static class VarianceData extends ContinuousRegionInfo {
+ /** Mean value in a given region. */
+ double mean;
+
+ /**
+ * @param var Variance in this region.
+ * @param size Size of data for which variance is calculated.
+ * @param mean Mean value in this region.
+ */
+ public VarianceData(double var, int size, double mean) {
+ super(var, size);
+ this.mean = mean;
+ }
+
+ /**
+ * No-op constructor. For serialization/deserialization.
+ */
+ public VarianceData() {
+ // No-op.
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ super.writeExternal(out);
+ out.writeDouble(mean);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ super.readExternal(in);
+ mean = in.readDouble();
+ }
+
+ /**
+ * Returns mean.
+ */
+ public double mean() {
+ return mean;
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override public VarianceData calculateRegionInfo(DoubleStream s, int size) {
+ PrimitiveIterator.OfDouble itr = s.iterator();
+ int i = 0;
+
+ double mean = 0.0;
+ double m2 = 0.0;
+
+ // Here we calculate variance and mean by incremental computation (Welford's online algorithm:
+ // m2 accumulates the sum of squared deviations from the running mean).
+ while (itr.hasNext()) {
+ i++;
+ double x = itr.next();
+ double delta = x - mean;
+ mean += delta / i;
+ double delta2 = x - mean;
+ m2 += delta * delta2;
+ }
+
+ // NOTE(review): if the stream is empty, i == 0 and m2 / i yields NaN — confirm callers
+ // never pass an empty stream. Also note the divisor is the counted i, not the size argument.
+ return new VarianceData(m2 / i, size, mean);
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitInfo<VarianceData> splitRegion(Integer[] s, double[] values, double[] labels, int regionIdx,
+ VarianceData d) {
+ int size = d.getSize();
+
+ double lm2 = 0.0;
+ double rm2 = d.impurity() * size;
+ int lSize = size;
+
+ double lMean = 0.0;
+ double rMean = d.mean;
+
+ // Impurities are compared in size-weighted (sum-of-squares) form.
+ double minImpurity = d.impurity() * size;
+ double curThreshold;
+ double curImpurity;
+ double threshold = Double.NEGATIVE_INFINITY;
+
+ int i = 0;
+ int nextIdx = s[0];
+ i++;
+ // lrImps layout: [0] = left m2, [1] = right m2, [2] = left mean, [3] = right mean.
+ double[] lrImps = new double[] {lm2, rm2, lMean, rMean};
+
+ do {
+ // Process all values equal to prev: a split is only evaluated at a value change,
+ // since splitting between equal feature values is impossible.
+ while (i < s.length) {
+ moveLeft(labels[nextIdx], lrImps[2], i, lrImps[0], lrImps[3], size - i, lrImps[1], lrImps);
+ curImpurity = (lrImps[0] + lrImps[1]);
+ curThreshold = values[nextIdx];
+
+ if (values[nextIdx] != values[(nextIdx = s[i++])]) {
+ if (curImpurity < minImpurity) {
+ lSize = i - 1;
+
+ lm2 = lrImps[0];
+ rm2 = lrImps[1];
+
+ lMean = lrImps[2];
+ rMean = lrImps[3];
+
+ minImpurity = curImpurity;
+ threshold = curThreshold;
+ }
+
+ break;
+ }
+ }
+ }
+ while (i < s.length - 1);
+
+ // No improving split found (lSize kept its initial value) — report no split.
+ if (lSize == size)
+ return null;
+
+ VarianceData lData = new VarianceData(lm2 / (lSize != 0 ? lSize : 1), lSize, lMean);
+ int rSize = size - lSize;
+ VarianceData rData = new VarianceData(rm2 / (rSize != 0 ? rSize : 1), rSize, rMean);
+
+ return new ContinuousSplitInfo<>(regionIdx, threshold, lData, rData);
+ }
+
+ /**
+ * Add point to the left interval and remove it from the right interval and calculate necessary statistics on
+ * intervals with new bounds.
+ */
+ private void moveLeft(double x, double lMean, int lSize, double lm2, double rMean, int rSize, double rm2,
+ double[] data) {
+ // We add point to the left interval (Welford-style incremental mean/m2 update).
+ double lDelta = x - lMean;
+ double lMeanNew = lMean + lDelta / lSize;
+ double lm2New = lm2 + lDelta * (x - lMeanNew);
+
+ // We remove point from the right interval. lSize + 1 is the size of right interval before removal.
+ double rMeanNew = (rMean * (rSize + 1) - x) / rSize;
+ double rm2New = rm2 - (x - rMean) * (x - rMeanNew);
+
+ data[0] = lm2New;
+ data[1] = rm2New;
+
+ data[2] = lMeanNew;
+ data[3] = rMeanNew;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/package-info.java
new file mode 100644
index 0000000..08c8a75
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/contsplitcalcs/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Calculators of splits by continuous features.
+ */
+package org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/package-info.java
new file mode 100644
index 0000000..8523914
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains column based decision tree algorithms.
+ */
+package org.apache.ignite.ml.trees.trainers.columnbased;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/RegionCalculators.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/RegionCalculators.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/RegionCalculators.java
new file mode 100644
index 0000000..5c4b354
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/RegionCalculators.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.regcalcs;
+
+import it.unimi.dsi.fastutil.doubles.Double2IntOpenHashMap;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.PrimitiveIterator;
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainerInput;
+
+/** Some commonly used functions for calculations of regions of space which correspond to decision tree leaf nodes. */
+public class RegionCalculators {
+ /** Mean value in the region (0.0 for an empty region). */
+ public static final IgniteFunction<DoubleStream, Double> MEAN = s -> s.average().orElse(0.0);
+
+ /** Most common value in the region (0.0 for an empty region). */
+ public static final IgniteFunction<DoubleStream, Double> MOST_COMMON =
+ s -> {
+ PrimitiveIterator.OfDouble itr = s.iterator();
+ Map<Double, Integer> voc = new HashMap<>();
+
+ // Count occurrences of each value. The first occurrence counts as 1 (seeding with 0, as before,
+ // made every count off by one; consistent now with the GINI counter below).
+ while (itr.hasNext())
+ voc.compute(itr.next(), (d, i) -> i != null ? i + 1 : 1);
+
+ return voc.entrySet().stream().max(Comparator.comparing(Map.Entry::getValue)).map(Map.Entry::getKey).orElse(0.0);
+ };
+
+ /** Variance of a region computed with an online (Welford-style) update; 0.0 for an empty region. */
+ public static final IgniteFunction<ColumnDecisionTreeTrainerInput, IgniteFunction<DoubleStream, Double>> VARIANCE = input ->
+ s -> {
+ PrimitiveIterator.OfDouble itr = s.iterator();
+ int i = 0;
+
+ double mean = 0.0;
+ double m2 = 0.0;
+
+ while (itr.hasNext()) {
+ i++;
+ double x = itr.next();
+ double delta = x - mean;
+ mean += delta / i;
+ double delta2 = x - mean;
+ m2 += delta * delta2;
+ }
+
+ return i > 0 ? m2 / i : 0.0;
+ };
+
+ /** Gini impurity of a region: 1 - sum(c_i^2) / size^2; 0.0 for an empty region. */
+ public static final IgniteFunction<ColumnDecisionTreeTrainerInput, IgniteFunction<DoubleStream, Double>> GINI = input ->
+ s -> {
+ PrimitiveIterator.OfDouble itr = s.iterator();
+
+ Double2IntOpenHashMap m = new Double2IntOpenHashMap();
+
+ int size = 0;
+
+ while (itr.hasNext()) {
+ size++;
+ m.compute(itr.next(), (a, i) -> i != null ? i + 1 : 1);
+ }
+
+ // Promote to double before multiplying: int v * v (and size * size below) can overflow for large regions.
+ double c2 = m.values().stream().mapToDouble(v -> (double)v * v).sum();
+
+ return size != 0 ? 1 - c2 / ((double)size * size) : 0.0;
+ };
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/package-info.java
new file mode 100644
index 0000000..e8edd8f
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/regcalcs/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Region calculators.
+ */
+package org.apache.ignite.ml.trees.trainers.columnbased.regcalcs;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/CategoricalFeatureProcessor.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/CategoricalFeatureProcessor.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/CategoricalFeatureProcessor.java
new file mode 100644
index 0000000..9469768
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/CategoricalFeatureProcessor.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.DoubleStream;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trees.CategoricalRegionInfo;
+import org.apache.ignite.ml.trees.CategoricalSplitInfo;
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.RegionProjection;
+
+import static org.apache.ignite.ml.trees.trainers.columnbased.vectors.FeatureVectorProcessorUtils.splitByBitSet;
+
+/**
+ * Categorical feature vector processor implementation used by {@link ColumnDecisionTreeTrainer}.
+ */
+public class CategoricalFeatureProcessor
+ implements FeatureProcessor<CategoricalRegionInfo, CategoricalSplitInfo<CategoricalRegionInfo>> {
+ /** Count of categories for this feature. */
+ private final int catsCnt;
+
+ /** Function for calculating impurity of a given region of points. */
+ private final IgniteFunction<DoubleStream, Double> calc;
+
+ /**
+ * @param calc Function for calculating impurity of a given region of points.
+ * @param catsCnt Number of categories.
+ */
+ public CategoricalFeatureProcessor(IgniteFunction<DoubleStream, Double> calc, int catsCnt) {
+ this.calc = calc;
+ this.catsCnt = catsCnt;
+ }
+
+ /**
+ * Calculates the split of a region by the given subset of categories and computes its information gain.
+ *
+ * @param leftCats Subset of categories which should go to the left subregion.
+ * @param intervalIdx Index of the region being split.
+ * @param mapping Mapping from category value to its bit index in {@code leftCats} (see {@link #mapping(BitSet)}).
+ * @param sampleIndexes Indexes of samples of the region.
+ * @param values Projections of samples on this feature.
+ * @param labels Labels of samples.
+ * @param impurity Impurity of the region before the split.
+ * @return Information about the split with information gain filled in.
+ */
+ private SplitInfo<CategoricalRegionInfo> split(BitSet leftCats, int intervalIdx, Map<Integer, Integer> mapping,
+ Integer[] sampleIndexes, double[] values, double[] labels, double impurity) {
+ // Partition samples: 'true' bucket goes to the left subregion, 'false' bucket to the right one.
+ Map<Boolean, List<Integer>> leftRight = Arrays.stream(sampleIndexes).
+ collect(Collectors.partitioningBy((smpl) -> leftCats.get(mapping.get((int)values[smpl]))));
+
+ List<Integer> left = leftRight.get(true);
+ int leftSize = left.size();
+ double leftImpurity = calc.apply(left.stream().mapToDouble(s -> labels[s]));
+
+ List<Integer> right = leftRight.get(false);
+ int rightSize = right.size();
+ double rightImpurity = calc.apply(right.stream().mapToDouble(s -> labels[s]));
+
+ int totalSize = leftSize + rightSize;
+
+ // Result of this call will be sent back to trainer node, we do not need vectors inside of sent data.
+ CategoricalSplitInfo<CategoricalRegionInfo> res = new CategoricalSplitInfo<>(intervalIdx,
+ new CategoricalRegionInfo(leftImpurity, null), // cats can be computed on the last step.
+ new CategoricalRegionInfo(rightImpurity, null),
+ leftCats);
+
+ // Information gain: parent impurity minus size-weighted impurities of the children.
+ res.setInfoGain(impurity - (double)leftSize / totalSize * leftImpurity - (double)rightSize / totalSize * rightImpurity);
+ return res;
+ }
+
+ /**
+ * Get a stream of subsets given categories count. Only half of all subsets is produced (see {@link PSI}),
+ * since a subset and its complement define the same split.
+ *
+ * @param catsCnt categories count.
+ * @return Stream of subsets given categories count.
+ */
+ private Stream<BitSet> powerSet(int catsCnt) {
+ Iterable<BitSet> iterable = () -> new PSI(catsCnt);
+ return StreamSupport.stream(iterable.spliterator(), false);
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitInfo findBestSplit(RegionProjection<CategoricalRegionInfo> regionPrj, double[] values,
+ double[] labels, int regIdx) {
+ Map<Integer, Integer> mapping = mapping(regionPrj.data().cats());
+
+ // Exhaustively try every subset of the region's categories and keep the split with the best information gain.
+ return powerSet(regionPrj.data().cats().length()).
+ map(s -> split(s, regIdx, mapping, regionPrj.sampleIndexes(), values, labels, regionPrj.data().impurity())).
+ max(Comparator.comparingDouble(SplitInfo::infoGain)).
+ orElse(null);
+ }
+
+ /** {@inheritDoc} */
+ @Override public RegionProjection<CategoricalRegionInfo> createInitialRegion(Integer[] sampleIndexes,
+ double[] values, double[] labels) {
+ // Initially the region contains all categories of this feature.
+ BitSet set = new BitSet();
+ set.set(0, catsCnt);
+
+ Double impurity = calc.apply(Arrays.stream(labels));
+
+ return new RegionProjection<>(sampleIndexes, new CategoricalRegionInfo(impurity, set), 0);
+ }
+
+ /** {@inheritDoc} */
+ @Override public SparseBitSet calculateOwnershipBitSet(RegionProjection<CategoricalRegionInfo> regionPrj,
+ double[] values,
+ CategoricalSplitInfo<CategoricalRegionInfo> s) {
+ SparseBitSet res = new SparseBitSet();
+ Arrays.stream(regionPrj.sampleIndexes()).forEach(smpl -> res.set(smpl, s.bitSet().get((int)values[smpl])));
+ return res;
+ }
+
+ /** {@inheritDoc} */
+ @Override public IgniteBiTuple<RegionProjection, RegionProjection> performSplit(SparseBitSet bs,
+ RegionProjection<CategoricalRegionInfo> reg, CategoricalRegionInfo leftData, CategoricalRegionInfo rightData) {
+ // NOTE(review): 'values' is passed as null here, but performSplitGeneric feeds it into calculateCats,
+ // which dereferences values[smpl] — this looks like an NPE for any non-empty region. TODO confirm whether
+ // this path is ever taken; if so, categories should be taken from leftData/rightData instead of recomputed.
+ return performSplitGeneric(bs, null, reg, leftData, rightData);
+ }
+
+ /** {@inheritDoc} */
+ @Override public IgniteBiTuple<RegionProjection, RegionProjection> performSplitGeneric(
+ SparseBitSet bs, double[] values, RegionProjection<CategoricalRegionInfo> reg, RegionInfo leftData,
+ RegionInfo rightData) {
+ int depth = reg.depth();
+
+ int lSize = bs.cardinality();
+ int rSize = reg.sampleIndexes().length - lSize;
+ IgniteBiTuple<Integer[], Integer[]> lrSamples = splitByBitSet(lSize, rSize, reg.sampleIndexes(), bs);
+ BitSet leftCats = calculateCats(lrSamples.get1(), values);
+ CategoricalRegionInfo lInfo = new CategoricalRegionInfo(leftData.impurity(), leftCats);
+
+ // TODO: IGNITE-5892 Check how it will work with sparse data.
+ BitSet rightCats = calculateCats(lrSamples.get2(), values);
+ CategoricalRegionInfo rInfo = new CategoricalRegionInfo(rightData.impurity(), rightCats);
+
+ RegionProjection<CategoricalRegionInfo> rPrj = new RegionProjection<>(lrSamples.get2(), rInfo, depth + 1);
+ RegionProjection<CategoricalRegionInfo> lPrj = new RegionProjection<>(lrSamples.get1(), lInfo, depth + 1);
+ return new IgniteBiTuple<>(lPrj, rPrj);
+ }
+
+ /**
+ * Powerset iterator. Iterates not over the whole powerset, but on half of it.
+ * NOTE(review): subsets are encoded in a single long ({@code BitSet.valueOf(new long[] {i})}) and the size
+ * is computed as {@code 1 << (bitCnt - 1)}, which assumes a small category count — TODO confirm the upstream
+ * bound on categories per feature.
+ */
+ private static class PSI implements Iterator<BitSet> {
+
+ /** Current subset number. */
+ private int i = 1; // We are not interested in {emptyset, set} split and therefore start from 1.
+
+ /** Size of set, subsets of which we iterate over. */
+ final int size;
+
+ /**
+ * @param bitCnt Size of set, subsets of which we iterate over.
+ */
+ PSI(int bitCnt) {
+ this.size = 1 << (bitCnt - 1);
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean hasNext() {
+ return i < size;
+ }
+
+ /** {@inheritDoc} */
+ @Override public BitSet next() {
+ BitSet res = BitSet.valueOf(new long[] {i});
+ i++;
+ return res;
+ }
+ }
+
+ /**
+ * Builds mapping from a category (position of a set bit in the given bitset) to its consecutive index
+ * 0, 1, ... among the set bits.
+ *
+ * @param bs Bitset of categories present in a region.
+ * @return Mapping from category value to consecutive index.
+ */
+ private Map<Integer, Integer> mapping(BitSet bs) {
+ int bn = 0;
+ Map<Integer, Integer> res = new HashMap<>();
+
+ int i = 0;
+ while ((bn = bs.nextSetBit(bn)) != -1) {
+ res.put(bn, i);
+ i++;
+ bn++;
+ }
+
+ return res;
+ }
+
+ /**
+ * Get set of categories of given samples.
+ *
+ * @param sampleIndexes Indexes of samples.
+ * @param values Projections of samples on this feature.
+ * @return Set of categories of given samples.
+ */
+ private BitSet calculateCats(Integer[] sampleIndexes, double[] values) {
+ BitSet res = new BitSet();
+
+ for (int smpl : sampleIndexes)
+ res.set((int)values[smpl]);
+
+ return res;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousFeatureProcessor.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousFeatureProcessor.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousFeatureProcessor.java
new file mode 100644
index 0000000..4117993
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousFeatureProcessor.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import java.util.Arrays;
+import java.util.Comparator;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.ContinuousSplitCalculator;
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.RegionProjection;
+
+import static org.apache.ignite.ml.trees.trainers.columnbased.vectors.FeatureVectorProcessorUtils.splitByBitSet;
+
+/**
+ * Feature processor implementation used by {@link org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer}
+ * for continuous features.
+ *
+ * @param <D> Information about regions. Designed to contain information which will make computations of impurity
+ * optimal.
+ */
+public class ContinuousFeatureProcessor<D extends ContinuousRegionInfo> implements
+ FeatureProcessor<D, ContinuousSplitInfo<D>> {
+ /** ContinuousSplitCalculator used for calculating of best split of each region. */
+ private final ContinuousSplitCalculator<D> calc;
+
+ /**
+ * @param splitCalc Calculator used for calculating splits.
+ */
+ public ContinuousFeatureProcessor(ContinuousSplitCalculator<D> splitCalc) {
+ this.calc = splitCalc;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitInfo<D> findBestSplit(RegionProjection<D> ri, double[] values, double[] labels, int regIdx) {
+ SplitInfo<D> res = calc.splitRegion(ri.sampleIndexes(), values, labels, regIdx, ri.data());
+
+ if (res == null)
+ return null;
+
+ // Weight subregion impurities by their relative sizes (accessors used for consistency with impurity() calls below).
+ double lWeight = (double)res.leftData().getSize() / ri.sampleIndexes().length;
+ double rWeight = (double)res.rightData().getSize() / ri.sampleIndexes().length;
+
+ double infoGain = ri.data().impurity() - lWeight * res.leftData().impurity() - rWeight * res.rightData().impurity();
+ res.setInfoGain(infoGain);
+
+ return res;
+ }
+
+ /** {@inheritDoc} */
+ @Override public RegionProjection<D> createInitialRegion(Integer[] samples, double[] values, double[] labels) {
+ // Samples are kept sorted by feature value: split calculation relies on this ordering.
+ Arrays.sort(samples, Comparator.comparingDouble(s -> values[s]));
+ return new RegionProjection<>(samples, calc.calculateRegionInfo(Arrays.stream(labels), samples.length), 0);
+ }
+
+ /** {@inheritDoc} */
+ @Override public SparseBitSet calculateOwnershipBitSet(RegionProjection<D> reg, double[] values,
+ ContinuousSplitInfo<D> s) {
+ SparseBitSet res = new SparseBitSet();
+
+ // Samples are sorted by feature value, so the first leftData().getSize() of them form the left subregion.
+ for (int i = 0; i < s.leftData().getSize(); i++)
+ res.set(reg.sampleIndexes()[i]);
+
+ return res;
+ }
+
+ /** {@inheritDoc} */
+ @Override public IgniteBiTuple<RegionProjection, RegionProjection> performSplit(SparseBitSet bs,
+ RegionProjection<D> reg, D leftData, D rightData) {
+ int lSize = leftData.getSize();
+ int rSize = rightData.getSize();
+ int depth = reg.depth();
+
+ IgniteBiTuple<Integer[], Integer[]> lrSamples = splitByBitSet(lSize, rSize, reg.sampleIndexes(), bs);
+
+ RegionProjection<D> left = new RegionProjection<>(lrSamples.get1(), leftData, depth + 1);
+ RegionProjection<D> right = new RegionProjection<>(lrSamples.get2(), rightData, depth + 1);
+
+ return new IgniteBiTuple<>(left, right);
+ }
+
+ /** {@inheritDoc} */
+ @Override public IgniteBiTuple<RegionProjection, RegionProjection> performSplitGeneric(SparseBitSet bs,
+ double[] labels, RegionProjection<D> reg, RegionInfo leftData, RegionInfo rightData) {
+ // NOTE(review): the interface declares the second parameter as feature values, but this implementation
+ // uses it as labels to recompute region infos — confirm call sites pass labels for continuous features.
+ int lSize = bs.cardinality();
+ int rSize = reg.sampleIndexes().length - lSize;
+ int depth = reg.depth();
+
+ IgniteBiTuple<Integer[], Integer[]> lrSamples = splitByBitSet(lSize, rSize, reg.sampleIndexes(), bs);
+
+ D ld = calc.calculateRegionInfo(Arrays.stream(lrSamples.get1()).mapToDouble(s -> labels[s]), lSize);
+ D rd = calc.calculateRegionInfo(Arrays.stream(lrSamples.get2()).mapToDouble(s -> labels[s]), rSize);
+
+ return new IgniteBiTuple<>(new RegionProjection<>(lrSamples.get1(), ld, depth + 1), new RegionProjection<>(lrSamples.get2(), rd, depth + 1));
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
new file mode 100644
index 0000000..d6f2847
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.nodes.ContinuousSplitNode;
+import org.apache.ignite.ml.trees.nodes.SplitNode;
+
+/**
+ * Information about split of continuous region.
+ *
+ * @param <D> Class encapsulating information about the region.
+ */
+public class ContinuousSplitInfo<D extends RegionInfo> extends SplitInfo<D> {
+ /**
+ * Value separating the two subregions: samples with values less or equal than it belong to the left
+ * subregion, all other samples belong to the right one.
+ */
+ private final double threshold;
+
+ /**
+ * Constructs split info for a continuous region.
+ *
+ * @param regionIdx Index of region being split.
+ * @param threshold Split value: samples with values less or equal than it go to the left subregion, others
+ * go to the right one.
+ * @param leftData Information about left subregion.
+ * @param rightData Information about right subregion.
+ */
+ public ContinuousSplitInfo(int regionIdx, double threshold, D leftData, D rightData) {
+ super(regionIdx, leftData, rightData);
+ this.threshold = threshold;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitNode createSplitNode(int featureIdx) {
+ return new ContinuousSplitNode(threshold, featureIdx);
+ }
+
+ /**
+ * Split value: samples with values less or equal than it go to the left subregion, others go to the right one.
+ *
+ * @return Threshold used for splits.
+ */
+ public double threshold() {
+ return threshold;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return String.format("ContinuousSplitInfo [threshold=%s, infoGain=%s, regionIdx=%s, leftData=%s, rightData=%s]",
+ threshold, infoGain, regionIdx, leftData, rightData);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureProcessor.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureProcessor.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureProcessor.java
new file mode 100644
index 0000000..cb8f5c2
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureProcessor.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.RegionProjection;
+
+/**
+ * Base interface for feature processors used in
+ * {@link org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer}.
+ *
+ * @param <D> Class representing data of regions resulted from split.
+ * @param <S> Class representing data of split.
+ */
+public interface FeatureProcessor<D extends RegionInfo, S extends SplitInfo<D>> {
+ /**
+ * Finds best split by this feature among all splits of all regions.
+ *
+ * @param regionPrj Projection of the region on this feature.
+ * @param values Projections of samples on this feature.
+ * @param labels Labels of samples.
+ * @param regIdx Index of the region.
+ * @return best split by this feature among all splits of all regions.
+ */
+ SplitInfo findBestSplit(RegionProjection<D> regionPrj, double[] values, double[] labels, int regIdx);
+
+ /**
+ * Creates initial region from samples.
+ *
+ * @param samples samples.
+ * @param values Projections of samples on this feature.
+ * @param labels Labels of samples.
+ * @return region.
+ */
+ RegionProjection<D> createInitialRegion(Integer[] samples, double[] values, double[] labels);
+
+ /**
+ * Calculates the bitset mapping each data point to left (corresponding bit is set) or right subregion.
+ *
+ * @param regionPrj Projection of the region being split.
+ * @param values Projections of samples on this feature.
+ * @param s data used for calculating the split.
+ * @return Bitset mapping each data point to left (corresponding bit is set) or right subregion.
+ */
+ SparseBitSet calculateOwnershipBitSet(RegionProjection<D> regionPrj, double[] values, S s);
+
+ /**
+ * Splits given region using bitset which maps data point to left or right subregion.
+ * This method is present for the vectors of the same type to be able to pass between them information about regions
+ * and therefore used iff the optimal split is received on feature of the same type.
+ *
+ * @param bs Bitset which maps data point to left or right subregion.
+ * @param reg Projection of the region being split.
+ * @param leftData Data of the left subregion.
+ * @param rightData Data of the right subregion.
+ * @return BiTuple of projections of the left and right subregions.
+ */
+ IgniteBiTuple<RegionProjection, RegionProjection> performSplit(SparseBitSet bs, RegionProjection<D> reg, D leftData,
+ D rightData);
+
+ /**
+ * Splits given region using bitset which maps data point to left or right subregion. This method is used iff the
+ * optimal split is received on feature of different type, therefore information about regions is limited to the
+ * {@link RegionInfo} class which is base for all classes used to represent region data.
+ *
+ * @param bs Bitset which maps data point to left or right subregion.
+ * @param values Projections of samples on this feature.
+ * @param reg Projection of the region being split.
+ * @param leftData Data of the left subregion.
+ * @param rightData Data of the right subregion.
+ * @return BiTuple of projections of the left and right subregions.
+ */
+ IgniteBiTuple<RegionProjection, RegionProjection> performSplitGeneric(SparseBitSet bs, double[] values,
+ RegionProjection<D> reg, RegionInfo leftData,
+ RegionInfo rightData);
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureVectorProcessorUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureVectorProcessorUtils.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureVectorProcessorUtils.java
new file mode 100644
index 0000000..69ff019
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/FeatureVectorProcessorUtils.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import org.apache.ignite.lang.IgniteBiTuple;
+
+/** Utility class for feature vector processors. */
+public class FeatureVectorProcessorUtils {
+ /** Utility classes are not meant to be instantiated. */
+ private FeatureVectorProcessorUtils() {
+ // No-op.
+ }
+
+ /**
+ * Split target array into two (left and right) arrays by bitset.
+ *
+ * @param lSize Left array size.
+ * @param rSize Right array size.
+ * @param samples Array of sample indexes to split.
+ * @param bs Bitset specifying split: samples whose bit is set go to the left array, others to the right one.
+ * @return BiTuple containing result of split.
+ */
+ public static IgniteBiTuple<Integer[], Integer[]> splitByBitSet(int lSize, int rSize, Integer[] samples,
+ SparseBitSet bs) {
+ Integer[] lArr = new Integer[lSize];
+ Integer[] rArr = new Integer[rSize];
+
+ int lc = 0;
+ int rc = 0;
+
+ // Assumes bs has exactly lSize set bits among the first lSize + rSize samples,
+ // otherwise an ArrayIndexOutOfBoundsException is thrown.
+ for (int i = 0; i < lSize + rSize; i++) {
+ int si = samples[i];
+
+ if (bs.get(si)) {
+ lArr[lc] = si;
+ lc++;
+ }
+ else {
+ rArr[rc] = si;
+ rc++;
+ }
+ }
+
+ return new IgniteBiTuple<>(lArr, rArr);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SampleInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SampleInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SampleInfo.java
new file mode 100644
index 0000000..8aa4f79
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SampleInfo.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
/**
 * Holds per-sample data for one fixed feature: the projection of the sample
 * onto that feature and the index of the sample.
 */
public class SampleInfo implements Externalizable {
    /** Projection of the sample onto the fixed feature. */
    private double val;

    /** Index of the sample. */
    private int sampleIdx;

    /**
     * Default constructor required by {@link Externalizable} for deserialization.
     */
    public SampleInfo() {
        // No-op.
    }

    /**
     * Build sample info from its components.
     *
     * @param val Projection of the sample onto the fixed feature.
     * @param sampleIdx Index of the sample.
     */
    public SampleInfo(double val, int sampleIdx) {
        this.val = val;
        this.sampleIdx = sampleIdx;
    }

    /**
     * @return Projection of the sample onto the fixed feature.
     */
    public double val() {
        return val;
    }

    /**
     * @return Index of the sample.
     */
    public int sampleInd() {
        return sampleIdx;
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeDouble(val);
        out.writeInt(sampleIdx);
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        val = in.readDouble();
        sampleIdx = in.readInt();
    }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SplitInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SplitInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SplitInfo.java
new file mode 100644
index 0000000..124e82f
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/SplitInfo.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
+
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.nodes.SplitNode;
+
+/**
+ * Class encapsulating information about the split.
+ *
+ * @param <D> Class representing information of left and right subregions.
+ */
+public abstract class SplitInfo<D extends RegionInfo> {
+ /** Information gain of this split. */
+ protected double infoGain;
+
+ /** Index of the region to split. */
+ protected final int regionIdx;
+
+ /** Data of left subregion. */
+ protected final D leftData;
+
+ /** Data of right subregion. */
+ protected final D rightData;
+
+ /**
+ * Construct the split info.
+ *
+ * @param regionIdx Index of the region to split.
+ * @param leftData Data of left subregion.
+ * @param rightData Data of right subregion.
+ */
+ public SplitInfo(int regionIdx, D leftData, D rightData) {
+ this.regionIdx = regionIdx;
+ this.leftData = leftData;
+ this.rightData = rightData;
+ }
+
+ /**
+ * Index of region to split.
+ *
+ * @return Index of region to split.
+ */
+ public int regionIndex() {
+ return regionIdx;
+ }
+
+ /**
+ * Information gain of the split.
+ *
+ * @return Information gain of the split.
+ */
+ public double infoGain() {
+ return infoGain;
+ }
+
+ /**
+ * Data of right subregion.
+ *
+ * @return Data of right subregion.
+ */
+ public D rightData() {
+ return rightData;
+ }
+
+ /**
+ * Data of left subregion.
+ *
+ * @return Data of left subregion.
+ */
+ public D leftData() {
+ return leftData;
+ }
+
+ /**
+ * Create SplitNode from this split info.
+ *
+ * @param featureIdx Index of feature by which goes split.
+ * @return SplitNode from this split info.
+ */
+ public abstract SplitNode createSplitNode(int featureIdx);
+
+ /**
+ * Set information gain.
+ *
+ * @param infoGain Information gain.
+ */
+ public void setInfoGain(double infoGain) {
+ this.infoGain = infoGain;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/package-info.java
new file mode 100644
index 0000000..0dea204
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains feature containers needed by column based decision tree trainers.
+ */
+package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
\ No newline at end of file
[04/28] ignite git commit: IGNITE-6848: SQL parser: support DROP
INDEX command. This closes #3006.
Posted by sb...@apache.org.
IGNITE-6848: SQL parser: support DROP INDEX command. This closes #3006.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/a1b6a33f
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/a1b6a33f
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/a1b6a33f
Branch: refs/heads/ignite-zk
Commit: a1b6a33ff16b927e9c5b232a70af20f03f9b36c0
Parents: 145c59d
Author: devozerov <vo...@gridgain.com>
Authored: Thu Nov 9 14:37:54 2017 +0300
Committer: devozerov <vo...@gridgain.com>
Committed: Thu Nov 9 14:37:54 2017 +0300
----------------------------------------------------------------------
.../apache/ignite/internal/sql/SqlLexer.java | 6 +
.../apache/ignite/internal/sql/SqlParser.java | 25 ++-
.../ignite/internal/sql/SqlParserUtils.java | 9 +-
.../sql/command/SqlDropIndexCommand.java | 80 ++++++++
.../internal/sql/SqlParserAbstractSelfTest.java | 46 +++++
.../sql/SqlParserCreateIndexSelfTest.java | 178 +++++++++++++++++
.../sql/SqlParserDropIndexSelfTest.java | 99 ++++++++++
.../ignite/internal/sql/SqlParserSelfTest.java | 198 -------------------
.../processors/query/h2/IgniteH2Indexing.java | 5 +-
.../query/h2/ddl/DdlStatementsProcessor.java | 20 +-
.../IgniteCacheQuerySelfTestSuite.java | 6 +-
11 files changed, 452 insertions(+), 220 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
index a8009b7..3fd6fa9 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
@@ -176,6 +176,10 @@ public class SqlLexer implements SqlLexerToken {
}
}
+ token = null;
+ tokenPos = pos;
+ tokenTyp = SqlLexerTokenType.EOF;
+
return false;
}
@@ -191,6 +195,8 @@ public class SqlLexer implements SqlLexerToken {
/** {@inheritDoc} */
public char tokenFirstChar() {
+ assert tokenTyp != SqlLexerTokenType.EOF;
+
return token.charAt(0);
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
index 9e0eee0..19f526d 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
@@ -19,6 +19,7 @@ package org.apache.ignite.internal.sql;
import org.apache.ignite.internal.sql.command.SqlCommand;
import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlDropIndexCommand;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.internal.sql.SqlKeyword.CREATE;
@@ -27,10 +28,8 @@ import static org.apache.ignite.internal.sql.SqlKeyword.HASH;
import static org.apache.ignite.internal.sql.SqlKeyword.INDEX;
import static org.apache.ignite.internal.sql.SqlKeyword.PRIMARY;
import static org.apache.ignite.internal.sql.SqlKeyword.SPATIAL;
-import static org.apache.ignite.internal.sql.SqlKeyword.TABLE;
import static org.apache.ignite.internal.sql.SqlKeyword.UNIQUE;
import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnexpectedToken;
-import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnsupported;
import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnsupportedIfMatchesKeyword;
import static org.apache.ignite.internal.sql.SqlParserUtils.matchesKeyword;
@@ -139,9 +138,6 @@ public class SqlParser {
break;
- case TABLE:
- throw errorUnsupported(lex);
-
case SPATIAL:
if (lex.shift() && matchesKeyword(lex, INDEX))
cmd = new SqlCreateIndexCommand().spatial(true);
@@ -157,7 +153,7 @@ public class SqlParser {
errorUnsupportedIfMatchesKeyword(lex, HASH, PRIMARY, UNIQUE);
}
- throw errorUnexpectedToken(lex, INDEX, TABLE, SPATIAL);
+ throw errorUnexpectedToken(lex, INDEX, SPATIAL);
}
/**
@@ -166,9 +162,20 @@ public class SqlParser {
* @return Command.
*/
private SqlCommand processDrop() {
- if (lex.shift() && lex.tokenType() == SqlLexerTokenType.DEFAULT)
- throw errorUnsupported(lex);
+ if (lex.shift() && lex.tokenType() == SqlLexerTokenType.DEFAULT) {
+ SqlCommand cmd = null;
+
+ switch (lex.token()) {
+ case INDEX:
+ cmd = new SqlDropIndexCommand();
+
+ break;
+ }
+
+ if (cmd != null)
+ return cmd.parse(lex);
+ }
- throw errorUnexpectedToken(lex, INDEX, TABLE);
+ throw errorUnexpectedToken(lex, INDEX);
}
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
index cfe4b6f..2f3b3da 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
@@ -163,14 +163,7 @@ public class SqlParserUtils {
case DEFAULT:
char c = token.tokenFirstChar();
- if ((c >= 'A' && c <= 'Z') || c == '_') {
- if (SqlKeyword.isKeyword(token.token()))
- throw errorUnexpectedToken(token, "[identifier]");
-
- return true;
- }
-
- throw error(token, "Illegal identifier name: " + token.token());
+ return ((c >= 'A' && c <= 'Z') || c == '_') && !SqlKeyword.isKeyword(token.token());
case QUOTED:
return true;
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlDropIndexCommand.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlDropIndexCommand.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlDropIndexCommand.java
new file mode 100644
index 0000000..1a1ea87
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlDropIndexCommand.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql.command;
+
+import org.apache.ignite.internal.sql.SqlLexer;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+import static org.apache.ignite.internal.sql.SqlKeyword.IF;
+import static org.apache.ignite.internal.sql.SqlParserUtils.parseIfExists;
+import static org.apache.ignite.internal.sql.SqlParserUtils.parseQualifiedIdentifier;
+
+/**
+ * DROP INDEX command.
+ */
+public class SqlDropIndexCommand implements SqlCommand {
+ /** Schema name. */
+ private String schemaName;
+
+ /** Index name. */
+ private String idxName;
+
+ /** IF EXISTS flag. */
+ private boolean ifExists;
+
+ /** {@inheritDoc} */
+ @Override public String schemaName() {
+ return schemaName;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void schemaName(String schemaName) {
+ this.schemaName = schemaName;
+ }
+
+ /**
+ * @return Index name.
+ */
+ public String indexName() {
+ return idxName;
+ }
+
+ /**
+ * @return IF EXISTS flag.
+ */
+ public boolean ifExists() {
+ return ifExists;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SqlCommand parse(SqlLexer lex) {
+ ifExists = parseIfExists(lex);
+
+ SqlQualifiedName idxQName = parseQualifiedIdentifier(lex, IF);
+
+ schemaName = idxQName.schemaName();
+ idxName = idxQName.name();
+
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SqlDropIndexCommand.class, this);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserAbstractSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserAbstractSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserAbstractSelfTest.java
new file mode 100644
index 0000000..c095201
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserAbstractSelfTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+import java.util.concurrent.Callable;
+
+/**
+ * Common class for SQL parser tests.
+ */
+@SuppressWarnings("ThrowableNotThrown")
+public abstract class SqlParserAbstractSelfTest extends GridCommonAbstractTest {
+ /**
+ * Make sure that parse error occurs.
+ *
+ * @param schema Schema.
+ * @param sql SQL.
+ * @param msg Expected error message.
+ */
+ protected static void assertParseError(final String schema, final String sql, String msg) {
+ GridTestUtils.assertThrows(null, new Callable<Void>() {
+ @Override public Void call() throws Exception {
+ new SqlParser(schema, sql).nextCommand();
+
+ return null;
+ }
+ }, SqlParseException.class, msg);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
new file mode 100644
index 0000000..5de0a3a
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlIndexColumn;
+import org.apache.ignite.internal.util.typedef.F;
+
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * Tests for SQL parser: CREATE INDEX.
+ */
+@SuppressWarnings({"UnusedReturnValue", "ThrowableNotThrown"})
+public class SqlParserCreateIndexSelfTest extends SqlParserAbstractSelfTest {
+ /**
+ * Tests for CREATE INDEX command.
+ *
+ * @throws Exception If failed.
+ */
+ public void testCreateIndex() throws Exception {
+ // Base.
+ parseValidate(null, "CREATE INDEX idx ON tbl(a)", null, "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC)", null, "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC)", null, "TBL", "IDX", "A", true);
+
+ // Case (in)sensitivity: unquoted identifiers are upper-cased, quoted ones keep their case.
+ parseValidate(null, "CREATE INDEX IDX ON TBL(COL)", null, "TBL", "IDX", "COL", false);
+ parseValidate(null, "CREATE INDEX iDx ON tBl(cOl)", null, "TBL", "IDX", "COL", false);
+
+ parseValidate(null, "CREATE INDEX \"idx\" ON tbl(col)", null, "TBL", "idx", "COL", false);
+ parseValidate(null, "CREATE INDEX \"iDx\" ON tbl(col)", null, "TBL", "iDx", "COL", false);
+
+ parseValidate(null, "CREATE INDEX idx ON \"tbl\"(col)", null, "tbl", "IDX", "COL", false);
+ parseValidate(null, "CREATE INDEX idx ON \"tBl\"(col)", null, "tBl", "IDX", "COL", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"col\")", null, "TBL", "IDX", "col", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\")", null, "TBL", "IDX", "cOl", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" ASC)", null, "TBL", "IDX", "cOl", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" DESC)", null, "TBL", "IDX", "cOl", true);
+
+ // Columns.
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b)", null, "TBL", "IDX", "A", false, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b)", null, "TBL", "IDX", "A", false, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b)", null, "TBL", "IDX", "A", true, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b DESC)", null, "TBL", "IDX", "A", true, "B", true);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b ASC)", null, "TBL", "IDX", "A", true, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c)", null, "TBL", "IDX", "A", false, "B", false, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b, c)", null, "TBL", "IDX", "A", true, "B", false, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC, c)", null, "TBL", "IDX", "A", false, "B", true, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c DESC)", null, "TBL", "IDX", "A", false, "B", false, "C", true);
+
+ // Negative cases.
+ assertParseError(null, "CREATE INDEX idx ON tbl()", "Unexpected token");
+ assertParseError(null, "CREATE INDEX idx ON tbl(a, a)", "Column already defined: A");
+ assertParseError(null, "CREATE INDEX idx ON tbl(a, b, a)", "Column already defined: A");
+ assertParseError(null, "CREATE INDEX idx ON tbl(b, a, a)", "Column already defined: A");
+
+ // Tests with schema.
+ parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON \"schema\".tbl(a)", "schema", "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON \"sChema\".tbl(a)", "sChema", "TBL", "IDX", "A", false);
+
+ parseValidate("SCHEMA", "CREATE INDEX idx ON tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ parseValidate("schema", "CREATE INDEX idx ON tbl(a)", "schema", "TBL", "IDX", "A", false);
+ parseValidate("sChema", "CREATE INDEX idx ON tbl(a)", "sChema", "TBL", "IDX", "A", false);
+
+ // NOT EXISTS
+ SqlCreateIndexCommand cmd;
+
+ cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertFalse(cmd.ifNotExists());
+
+ cmd = parseValidate(null, "CREATE INDEX IF NOT EXISTS idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertTrue(cmd.ifNotExists());
+
+ assertParseError(null, "CREATE INDEX IF idx ON tbl(a)", "Unexpected token: \"IDX\"");
+ assertParseError(null, "CREATE INDEX IF NOT idx ON tbl(a)", "Unexpected token: \"IDX\"");
+ assertParseError(null, "CREATE INDEX IF EXISTS idx ON tbl(a)", "Unexpected token: \"EXISTS\"");
+ assertParseError(null, "CREATE INDEX NOT EXISTS idx ON tbl(a)", "Unexpected token: \"NOT\"");
+
+ // SPATIAL
+ cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertFalse(cmd.spatial());
+
+ cmd = parseValidate(null, "CREATE SPATIAL INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertTrue(cmd.spatial());
+
+ // UNIQUE
+ assertParseError(null, "CREATE UNIQUE INDEX idx ON tbl(a)", "Unsupported keyword: \"UNIQUE\"");
+
+ // HASH
+ assertParseError(null, "CREATE HASH INDEX idx ON tbl(a)", "Unsupported keyword: \"HASH\"");
+
+ // PRIMARY KEY
+ assertParseError(null, "CREATE PRIMARY KEY INDEX idx ON tbl(a)", "Unsupported keyword: \"PRIMARY\"");
+ }
+
+ /**
+ * Parse and validate SQL script.
+ *
+ * @param schema Schema.
+ * @param sql SQL.
+ * @param expSchemaName Expected schema name.
+ * @param expTblName Expected table name.
+ * @param expIdxName Expected index name.
+ * @param expColDefs Expected column definitions as flat (name, descending flag) pairs.
+ * @return Command.
+ */
+ private static SqlCreateIndexCommand parseValidate(String schema, String sql, String expSchemaName,
+ String expTblName, String expIdxName, Object... expColDefs) {
+ SqlCreateIndexCommand cmd = (SqlCreateIndexCommand)new SqlParser(schema, sql).nextCommand();
+
+ validate(cmd, expSchemaName, expTblName, expIdxName, expColDefs);
+
+ return cmd;
+ }
+
+ /**
+ * Validate create index command.
+ *
+ * @param cmd Command.
+ * @param expSchemaName Expected schema name.
+ * @param expTblName Expected table name.
+ * @param expIdxName Expected index name.
+ * @param expColDefs Expected column definitions as flat (name, descending flag) pairs.
+ */
+ private static void validate(SqlCreateIndexCommand cmd, String expSchemaName, String expTblName, String expIdxName,
+ Object... expColDefs) {
+ assertEquals(expSchemaName, cmd.schemaName());
+ assertEquals(expTblName, cmd.tableName());
+ assertEquals(expIdxName, cmd.indexName());
+
+ // Column definitions must come as non-empty (column name, descending flag) pairs.
+ if (F.isEmpty(expColDefs) || expColDefs.length % 2 == 1)
+ throw new IllegalArgumentException("Column definitions must be even.");
+
+ Collection<SqlIndexColumn> cols = cmd.columns();
+
+ assertEquals(expColDefs.length / 2, cols.size());
+
+ Iterator<SqlIndexColumn> colIter = cols.iterator();
+
+ // Each iteration consumes one (name, descending flag) pair from expColDefs.
+ for (int i = 0; i < expColDefs.length;) {
+ SqlIndexColumn col = colIter.next();
+
+ String expColName = (String)expColDefs[i++];
+ Boolean expDesc = (Boolean) expColDefs[i++];
+
+ assertEquals(expColName, col.name());
+ assertEquals(expDesc, (Boolean)col.descending());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserDropIndexSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserDropIndexSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserDropIndexSelfTest.java
new file mode 100644
index 0000000..a0af3a6
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserDropIndexSelfTest.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.sql.command.SqlDropIndexCommand;
+
+/**
+ * Tests for SQL parser: DROP INDEX.
+ */
+public class SqlParserDropIndexSelfTest extends SqlParserAbstractSelfTest {
+ /**
+ * Tests for DROP INDEX command.
+ *
+ * @throws Exception If failed.
+ */
+ public void testDropIndex() throws Exception {
+ // Base.
+ parseValidate(null, "DROP INDEX idx", null, "IDX");
+ parseValidate(null, "DROP INDEX IDX", null, "IDX");
+ parseValidate(null, "DROP INDEX iDx", null, "IDX");
+
+ // Quoted identifiers keep their case, unquoted ones are upper-cased.
+ parseValidate(null, "DROP INDEX \"idx\"", null, "idx");
+ parseValidate(null, "DROP INDEX \"IDX\"", null, "IDX");
+ parseValidate(null, "DROP INDEX \"iDx\"", null, "iDx");
+
+ assertParseError(null, "DROP INDEX", "Unexpected");
+
+ // Schema.
+ parseValidate("SCHEMA", "DROP INDEX idx", "SCHEMA", "IDX");
+ parseValidate("schema", "DROP INDEX idx", "schema", "IDX");
+ parseValidate("sChema", "DROP INDEX idx", "sChema", "IDX");
+
+ parseValidate(null, "DROP INDEX \"SCHEMA\".idx", "SCHEMA", "IDX");
+ parseValidate(null, "DROP INDEX \"schema\".idx", "schema", "IDX");
+ parseValidate(null, "DROP INDEX \"sChema\".idx", "sChema", "IDX");
+
+ parseValidate(null, "DROP INDEX \"schema\".\"idx\"", "schema", "idx");
+
+ assertParseError(null, "DROP INDEX .idx", "Unexpected");
+
+ // IF EXISTS
+ SqlDropIndexCommand cmd;
+
+ cmd = parseValidate(null, "DROP INDEX schema.idx", "SCHEMA", "IDX");
+ assertFalse(cmd.ifExists());
+
+ cmd = parseValidate(null, "DROP INDEX IF EXISTS schema.idx", "SCHEMA", "IDX");
+ assertTrue(cmd.ifExists());
+
+ assertParseError(null, "DROP INDEX IF idx", "Unexpected token: \"IDX\"");
+
+ assertParseError(null, "DROP INDEX EXISTS idx", "Unexpected token: \"EXISTS\"");
+ }
+
+ /**
+ * Parse and validate SQL script.
+ *
+ * @param schema Schema.
+ * @param sql SQL.
+ * @param expSchemaName Expected schema name.
+ * @param expIdxName Expected index name.
+ * @return Command.
+ */
+ private static SqlDropIndexCommand parseValidate(String schema, String sql, String expSchemaName,
+ String expIdxName) {
+ SqlDropIndexCommand cmd = (SqlDropIndexCommand)new SqlParser(schema, sql).nextCommand();
+
+ validate(cmd, expSchemaName, expIdxName);
+
+ return cmd;
+ }
+
+ /**
+ * Validate command.
+ *
+ * @param cmd Command.
+ * @param expSchemaName Expected schema name.
+ * @param expIdxName Expected index name.
+ */
+ private static void validate(SqlDropIndexCommand cmd, String expSchemaName, String expIdxName) {
+ assertEquals(expSchemaName, cmd.schemaName());
+ assertEquals(expIdxName, cmd.indexName());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
deleted file mode 100644
index 98a6aae..0000000
--- a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.sql;
-
-import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
-import org.apache.ignite.internal.sql.command.SqlIndexColumn;
-import org.apache.ignite.internal.util.typedef.F;
-import org.apache.ignite.testframework.GridTestUtils;
-import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.concurrent.Callable;
-
-/**
- * Test for parser.
- */
-@SuppressWarnings({"UnusedReturnValue", "ThrowableNotThrown"})
-public class SqlParserSelfTest extends GridCommonAbstractTest {
- /**
- * Tests for CREATE INDEX command.
- *
- * @throws Exception If failed.
- */
- public void testCreateIndex() throws Exception {
- // Base.
- parseValidate(null, "CREATE INDEX idx ON tbl(a)", null, "TBL", "IDX", "A", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a ASC)", null, "TBL", "IDX", "A", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a DESC)", null, "TBL", "IDX", "A", true);
-
- // Case (in)sensitivity.
- parseValidate(null, "CREATE INDEX IDX ON TBL(COL)", null, "TBL", "IDX", "COL", false);
- parseValidate(null, "CREATE INDEX iDx ON tBl(cOl)", null, "TBL", "IDX", "COL", false);
-
- parseValidate(null, "CREATE INDEX \"idx\" ON tbl(col)", null, "TBL", "idx", "COL", false);
- parseValidate(null, "CREATE INDEX \"iDx\" ON tbl(col)", null, "TBL", "iDx", "COL", false);
-
- parseValidate(null, "CREATE INDEX idx ON \"tbl\"(col)", null, "tbl", "IDX", "COL", false);
- parseValidate(null, "CREATE INDEX idx ON \"tBl\"(col)", null, "tBl", "IDX", "COL", false);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(\"col\")", null, "TBL", "IDX", "col", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\")", null, "TBL", "IDX", "cOl", false);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" ASC)", null, "TBL", "IDX", "cOl", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" DESC)", null, "TBL", "IDX", "cOl", true);
-
- // Columns.
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b)", null, "TBL", "IDX", "A", false, "B", false);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b)", null, "TBL", "IDX", "A", false, "B", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b)", null, "TBL", "IDX", "A", true, "B", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
- parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b DESC)", null, "TBL", "IDX", "A", true, "B", true);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
- parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b ASC)", null, "TBL", "IDX", "A", true, "B", false);
-
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c)", null, "TBL", "IDX", "A", false, "B", false, "C", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b, c)", null, "TBL", "IDX", "A", true, "B", false, "C", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC, c)", null, "TBL", "IDX", "A", false, "B", true, "C", false);
- parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c DESC)", null, "TBL", "IDX", "A", false, "B", false, "C", true);
-
- // Negative cases.
- parseError(null, "CREATE INDEX idx ON tbl()", "Unexpected token");
- parseError(null, "CREATE INDEX idx ON tbl(a, a)", "Column already defined: A");
- parseError(null, "CREATE INDEX idx ON tbl(a, b, a)", "Column already defined: A");
- parseError(null, "CREATE INDEX idx ON tbl(b, a, a)", "Column already defined: A");
-
- // Tests with schema.
- parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- parseValidate(null, "CREATE INDEX idx ON \"schema\".tbl(a)", "schema", "TBL", "IDX", "A", false);
- parseValidate(null, "CREATE INDEX idx ON \"sChema\".tbl(a)", "sChema", "TBL", "IDX", "A", false);
-
- parseValidate("SCHEMA", "CREATE INDEX idx ON tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- parseValidate("schema", "CREATE INDEX idx ON tbl(a)", "schema", "TBL", "IDX", "A", false);
- parseValidate("sChema", "CREATE INDEX idx ON tbl(a)", "sChema", "TBL", "IDX", "A", false);
-
- // NOT EXISTS
- SqlCreateIndexCommand cmd;
-
- cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- assertFalse(cmd.ifNotExists());
-
- cmd = parseValidate(null, "CREATE INDEX IF NOT EXISTS idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- assertTrue(cmd.ifNotExists());
-
- parseError(null, "CREATE INDEX IF idx ON tbl(a)", "Unexpected token: \"IDX\"");
- parseError(null, "CREATE INDEX IF NOT idx ON tbl(a)", "Unexpected token: \"IDX\"");
- parseError(null, "CREATE INDEX IF EXISTS idx ON tbl(a)", "Unexpected token: \"EXISTS\"");
- parseError(null, "CREATE INDEX NOT EXISTS idx ON tbl(a)", "Unexpected token: \"NOT\"");
-
- // SPATIAL
- cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- assertFalse(cmd.spatial());
-
- cmd = parseValidate(null, "CREATE SPATIAL INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
- assertTrue(cmd.spatial());
-
- // UNIQUE
- parseError(null, "CREATE UNIQUE INDEX idx ON tbl(a)", "Unsupported keyword: \"UNIQUE\"");
-
- // HASH
- parseError(null, "CREATE HASH INDEX idx ON tbl(a)", "Unsupported keyword: \"HASH\"");
-
- // PRIMARY KEY
- parseError(null, "CREATE PRIMARY KEY INDEX idx ON tbl(a)", "Unsupported keyword: \"PRIMARY\"");
- }
-
- /**
- * Make sure that parse error occurs.
- *
- * @param schema Schema.
- * @param sql SQL.
- * @param msg Expected error message.
- */
- private static void parseError(final String schema, final String sql, String msg) {
- GridTestUtils.assertThrows(null, new Callable<Void>() {
- @Override public Void call() throws Exception {
- new SqlParser(schema, sql).nextCommand();
-
- return null;
- }
- }, SqlParseException.class, msg);
- }
-
- /**
- * Parse and validate SQL script.
- *
- * @param schema Schema.
- * @param sql SQL.
- * @param expSchemaName Expected schema name.
- * @param expTblName Expected table name.
- * @param expIdxName Expected index name.
- * @param expColDefs Expected column definitions.
- * @return Command.
- */
- private static SqlCreateIndexCommand parseValidate(String schema, String sql, String expSchemaName,
- String expTblName, String expIdxName, Object... expColDefs) {
- SqlCreateIndexCommand cmd = (SqlCreateIndexCommand)new SqlParser(schema, sql).nextCommand();
-
- validate(cmd, expSchemaName, expTblName, expIdxName, expColDefs);
-
- return cmd;
- }
-
- /**
- * Validate create index command.
- *
- * @param cmd Command.
- * @param expSchemaName Expected schema name.
- * @param expTblName Expected table name.
- * @param expIdxName Expected index name.
- * @param expColDefs Expected column definitions.
- */
- private static void validate(SqlCreateIndexCommand cmd, String expSchemaName, String expTblName, String expIdxName,
- Object... expColDefs) {
- assertEquals(expSchemaName, cmd.schemaName());
- assertEquals(expTblName, cmd.tableName());
- assertEquals(expIdxName, cmd.indexName());
-
- if (F.isEmpty(expColDefs) || expColDefs.length % 2 == 1)
- throw new IllegalArgumentException("Column definitions must be even.");
-
- Collection<SqlIndexColumn> cols = cmd.columns();
-
- assertEquals(expColDefs.length / 2, cols.size());
-
- Iterator<SqlIndexColumn> colIter = cols.iterator();
-
- for (int i = 0; i < expColDefs.length;) {
- SqlIndexColumn col = colIter.next();
-
- String expColName = (String)expColDefs[i++];
- Boolean expDesc = (Boolean) expColDefs[i++];
-
- assertEquals(expColName, col.name());
- assertEquals(expDesc, (Boolean)col.descending());
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
index 884752d..52185f4 100644
--- a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
+++ b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
@@ -118,6 +118,7 @@ import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor;
import org.apache.ignite.internal.sql.SqlParser;
import org.apache.ignite.internal.sql.command.SqlCommand;
import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlDropIndexCommand;
import org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashMap;
import org.apache.ignite.internal.util.GridEmptyCloseableIterator;
import org.apache.ignite.internal.util.GridSpinBusyLock;
@@ -1348,8 +1349,8 @@ public class IgniteH2Indexing implements GridQueryIndexing {
if (parser.nextCommand() != null)
return null;
- // Only CREATE INDEX is supported for now.
- if (!(cmd instanceof SqlCreateIndexCommand))
+ // Only CREATE/DROP INDEX is supported for now.
+ if (!(cmd instanceof SqlCreateIndexCommand || cmd instanceof SqlDropIndexCommand))
return null;
}
catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
index fd425c2..3c8d9fe 100644
--- a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
+++ b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
@@ -53,6 +53,7 @@ import org.apache.ignite.internal.processors.query.h2.sql.GridSqlStatement;
import org.apache.ignite.internal.processors.query.schema.SchemaOperationException;
import org.apache.ignite.internal.sql.command.SqlCommand;
import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlDropIndexCommand;
import org.apache.ignite.internal.sql.command.SqlIndexColumn;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.typedef.F;
@@ -135,9 +136,26 @@ public class DdlStatementsProcessor {
newIdx.setFields(flds);
- fut = ctx.query().dynamicIndexCreate(tbl.cacheName(), cmd.schemaName(), typeDesc.tableName(),
+ fut = ctx.query().dynamicIndexCreate(tbl.cacheName(), cmd0.schemaName(), typeDesc.tableName(),
newIdx, cmd0.ifNotExists());
}
+ else if (cmd instanceof SqlDropIndexCommand) {
+ SqlDropIndexCommand cmd0 = (SqlDropIndexCommand)cmd;
+
+ GridH2Table tbl = idx.dataTableForIndex(cmd0.schemaName(), cmd0.indexName());
+
+ if (tbl != null) {
+ fut = ctx.query().dynamicIndexDrop(tbl.cacheName(), cmd0.schemaName(), cmd0.indexName(),
+ cmd0.ifExists());
+ }
+ else {
+ if (cmd0.ifExists())
+ fut = new GridFinishedFuture();
+ else
+ throw new SchemaOperationException(SchemaOperationException.CODE_INDEX_NOT_FOUND,
+ cmd0.indexName());
+ }
+ }
else
throw new IgniteSQLException("Unsupported DDL operation: " + sql,
IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
http://git-wip-us.apache.org/repos/asf/ignite/blob/a1b6a33f/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
index 5339865..16fd5e0 100644
--- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
+++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
@@ -154,7 +154,8 @@ import org.apache.ignite.internal.processors.query.h2.sql.GridQueryParsingTest;
import org.apache.ignite.internal.processors.query.h2.sql.H2CompareBigQueryDistributedJoinsTest;
import org.apache.ignite.internal.processors.query.h2.sql.H2CompareBigQueryTest;
import org.apache.ignite.internal.processors.sql.SqlConnectorConfigurationValidationSelfTest;
-import org.apache.ignite.internal.sql.SqlParserSelfTest;
+import org.apache.ignite.internal.sql.SqlParserCreateIndexSelfTest;
+import org.apache.ignite.internal.sql.SqlParserDropIndexSelfTest;
import org.apache.ignite.spi.communication.tcp.GridOrderedMessageCancelSelfTest;
import org.apache.ignite.testframework.IgniteTestSuite;
@@ -169,7 +170,8 @@ public class IgniteCacheQuerySelfTestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
IgniteTestSuite suite = new IgniteTestSuite("Ignite Cache Queries Test Suite");
- suite.addTestSuite(SqlParserSelfTest.class);
+ suite.addTestSuite(SqlParserCreateIndexSelfTest.class);
+ suite.addTestSuite(SqlParserDropIndexSelfTest.class);
suite.addTestSuite(SqlConnectorConfigurationValidationSelfTest.class);
suite.addTestSuite(ClientConnectorConfigurationValidationSelfTest.class);
[15/28] ignite git commit: ignite-6669 Added eviction policy factory
to cache configuration.
Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/lru/LruEvictionPolicyFactorySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/lru/LruEvictionPolicyFactorySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/lru/LruEvictionPolicyFactorySelfTest.java
new file mode 100644
index 0000000..d53cb6f
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/lru/LruEvictionPolicyFactorySelfTest.java
@@ -0,0 +1,352 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.cache.eviction.lru;
+
+import javax.cache.configuration.Factory;
+import org.apache.ignite.cache.eviction.EvictableEntry;
+import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
+import org.apache.ignite.cache.eviction.lru.LruEvictionPolicyFactory;
+import org.apache.ignite.internal.processors.cache.eviction.EvictionPolicyFactoryAbstractTest;
+
+/**
+ * LRU Eviction policy tests.
+ */
+public class LruEvictionPolicyFactorySelfTest extends EvictionPolicyFactoryAbstractTest<LruEvictionPolicy<String, String>> {
+ /** {@inheritDoc} */
+ @Override protected Factory<LruEvictionPolicy<String, String>> createPolicyFactory() {
+ return new LruEvictionPolicyFactory<>(plcMax, plcBatchSize, plcMaxMemSize);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected Factory<LruEvictionPolicy<String, String>> createNearPolicyFactory(int nearMax) {
+ LruEvictionPolicyFactory<String, String> plc = new LruEvictionPolicyFactory<>();
+
+ plc.setMaxSize(nearMax);
+ plc.setBatchSize(plcBatchSize);
+
+ return plc;
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMiddleAccess() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ LruEvictionPolicy<String, String> p = policy();
+
+ int max = 8;
+
+ p.setMaxSize(max * MockEntry.ENTRY_SIZE);
+
+ MockEntry entry1 = new MockEntry("1", "1");
+ MockEntry entry2 = new MockEntry("2", "2");
+ MockEntry entry3 = new MockEntry("3", "3");
+
+ p.onEntryAccessed(false, entry1);
+ p.onEntryAccessed(false, entry2);
+ p.onEntryAccessed(false, entry3);
+
+ MockEntry[] freqUsed = new MockEntry[] {
+ new MockEntry("4", "4"),
+ new MockEntry("5", "5"),
+ new MockEntry("6", "6"),
+ new MockEntry("7", "7"),
+ new MockEntry("8", "7")
+ };
+
+ for (MockEntry e : freqUsed)
+ p.onEntryAccessed(false, e);
+
+ for (MockEntry e : freqUsed)
+ assert !e.isEvicted();
+
+ int cnt = 1001;
+
+ for (int i = 0; i < cnt; i++)
+ p.onEntryAccessed(false, entry(freqUsed, i % freqUsed.length));
+
+ info(p);
+
+ check(max, MockEntry.ENTRY_SIZE);
+ }
+ finally {
+ stopGrid();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicy() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ LruEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+
+ p.onEntryAccessed(false, e4);
+
+ check(p.queue(), e2, e3, e4);
+ check(MockEntry.ENTRY_SIZE, p.queue(), e2, e3, e4);
+
+ assertTrue(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assertTrue(e3.isEvicted());
+ assertFalse(e1.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ assertEquals(3, p.getCurrentSize());
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e1, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ assertEquals(3, p.getCurrentSize());
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e1, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e5);
+
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ assertFalse(e5.isEvicted());
+
+ info(p);
+ }
+ finally {
+ stopGrid();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicyWithBatch() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ LruEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+
+ p.onEntryAccessed(false, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3, e4);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ // Batch evicted
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e1.isEvicted());
+ assertTrue(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5, e1);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+ assertFalse(e1.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e1, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e1.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5, e1);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+ assertFalse(e1.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e1, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e1.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3);
+
+ assertFalse(e3.isEvicted());
+
+ p.onEntryAccessed(true, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ info(p);
+ }
+ finally {
+ stopGrid();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void checkNearPolicies(int endNearPlcSize) {
+ for (int i = 0; i < gridCnt; i++)
+ for (EvictableEntry<String, String> e : nearPolicy(i).queue())
+ assert !e.isCached() : "Invalid near policy size: " + nearPolicy(i).queue();
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void checkPolicies() {
+ for (int i = 0; i < gridCnt; i++) {
+ if (plcMaxMemSize > 0) {
+ int size = 0;
+
+ for (EvictableEntry<String, String> entry : policy(i).queue())
+ size += entry.size();
+
+ assertEquals(size, policy(i).getCurrentMemorySize());
+ }
+ else
+ assertTrue(policy(i).queue().size() <= plcMax + plcBatchSize);
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/sorted/SortedEvictionPolicyFactorySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/sorted/SortedEvictionPolicyFactorySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/sorted/SortedEvictionPolicyFactorySelfTest.java
new file mode 100644
index 0000000..a0ab18f
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/sorted/SortedEvictionPolicyFactorySelfTest.java
@@ -0,0 +1,264 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.cache.eviction.sorted;
+
+import javax.cache.configuration.Factory;
+import org.apache.ignite.cache.eviction.sorted.SortedEvictionPolicy;
+import org.apache.ignite.cache.eviction.sorted.SortedEvictionPolicyFactory;
+import org.apache.ignite.internal.processors.cache.eviction.EvictionPolicyFactoryAbstractTest;
+
+/**
+ * Sorted eviction policy tests.
+ */
+public class SortedEvictionPolicyFactorySelfTest extends EvictionPolicyFactoryAbstractTest<SortedEvictionPolicy<String, String>> {
+ /** {@inheritDoc} */
+ @Override protected Factory<SortedEvictionPolicy<String, String>> createPolicyFactory() {
+ return new SortedEvictionPolicyFactory<>(plcMax, plcBatchSize, plcMaxMemSize);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected Factory<SortedEvictionPolicy<String, String>> createNearPolicyFactory(int nearMax) {
+ SortedEvictionPolicyFactory<String, String> plc = new SortedEvictionPolicyFactory<>();
+
+ plc.setMaxSize(nearMax);
+ plc.setBatchSize(plcBatchSize);
+
+ return plc;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicy() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ SortedEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ p.onEntryAccessed(false, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e2, e3, e4);
+
+ assertTrue(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e5);
+
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ assertFalse(e5.isEvicted());
+
+ info(p);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicyWithBatch() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ SortedEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ p.onEntryAccessed(false, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3, e4);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ // Batch evicted.
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e1.isEvicted());
+ assertTrue(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e3, e4, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e3, e4, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e3, e4, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3);
+
+ assertFalse(e3.isEvicted());
+
+ p.onEntryAccessed(true, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ assertFalse(e3.isEvicted());
+
+ info(p);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheEvictionSelfTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheEvictionSelfTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheEvictionSelfTestSuite.java
index 84b1452..cd2ac5c 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheEvictionSelfTestSuite.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheEvictionSelfTestSuite.java
@@ -29,7 +29,9 @@ import org.apache.ignite.internal.processors.cache.eviction.GridCacheEvictableEn
import org.apache.ignite.internal.processors.cache.eviction.GridCacheEvictionFilterSelfTest;
import org.apache.ignite.internal.processors.cache.eviction.GridCacheEvictionLockUnlockSelfTest;
import org.apache.ignite.internal.processors.cache.eviction.GridCacheEvictionTouchSelfTest;
+import org.apache.ignite.internal.processors.cache.eviction.fifo.FifoEvictionPolicyFactorySelfTest;
import org.apache.ignite.internal.processors.cache.eviction.fifo.FifoEvictionPolicySelfTest;
+import org.apache.ignite.internal.processors.cache.eviction.lru.LruEvictionPolicyFactorySelfTest;
import org.apache.ignite.internal.processors.cache.eviction.lru.LruEvictionPolicySelfTest;
import org.apache.ignite.internal.processors.cache.eviction.lru.LruNearEvictionPolicySelfTest;
import org.apache.ignite.internal.processors.cache.eviction.lru.LruNearOnlyNearEvictionPolicySelfTest;
@@ -43,6 +45,7 @@ import org.apache.ignite.internal.processors.cache.eviction.paged.Random2LruPage
import org.apache.ignite.internal.processors.cache.eviction.paged.RandomLruNearEnabledPageEvictionMultinodeTest;
import org.apache.ignite.internal.processors.cache.eviction.paged.RandomLruPageEvictionMultinodeTest;
import org.apache.ignite.internal.processors.cache.eviction.paged.RandomLruPageEvictionWithRebalanceTest;
+import org.apache.ignite.internal.processors.cache.eviction.sorted.SortedEvictionPolicyFactorySelfTest;
import org.apache.ignite.internal.processors.cache.eviction.sorted.SortedEvictionPolicySelfTest;
/**
@@ -59,6 +62,9 @@ public class IgniteCacheEvictionSelfTestSuite extends TestSuite {
suite.addTest(new TestSuite(FifoEvictionPolicySelfTest.class));
suite.addTest(new TestSuite(SortedEvictionPolicySelfTest.class));
suite.addTest(new TestSuite(LruEvictionPolicySelfTest.class));
+ suite.addTest(new TestSuite(FifoEvictionPolicyFactorySelfTest.class));
+ suite.addTest(new TestSuite(SortedEvictionPolicyFactorySelfTest.class));
+ suite.addTest(new TestSuite(LruEvictionPolicyFactorySelfTest.class));
suite.addTest(new TestSuite(LruNearEvictionPolicySelfTest.class));
suite.addTest(new TestSuite(LruNearOnlyNearEvictionPolicySelfTest.class));
suite.addTest(new TestSuite(GridCacheNearEvictionSelfTest.class));
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ApiParity/CacheConfigurationParityTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ApiParity/CacheConfigurationParityTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ApiParity/CacheConfigurationParityTest.cs
index 0467937..81303cb 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ApiParity/CacheConfigurationParityTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/ApiParity/CacheConfigurationParityTest.cs
@@ -56,7 +56,8 @@ namespace Apache.Ignite.Core.Tests.ApiParity
/** Properties that are missing on .NET side. */
private static readonly string[] MissingProperties =
{
- "NodeFilter" // IGNITE-2890
+ "NodeFilter", // IGNITE-2890
+ "EvictionPolicyFactory" // IGNITE-6649
};
/// <summary>
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/yardstick/src/main/java/org/apache/ignite/yardstick/IgniteNode.java
----------------------------------------------------------------------
diff --git a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/IgniteNode.java b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/IgniteNode.java
index 9770fa3..a6850bf 100644
--- a/modules/yardstick/src/main/java/org/apache/ignite/yardstick/IgniteNode.java
+++ b/modules/yardstick/src/main/java/org/apache/ignite/yardstick/IgniteNode.java
@@ -107,8 +107,10 @@ public class IgniteNode implements BenchmarkServer {
if (args.isNearCache()) {
NearCacheConfiguration nearCfg = new NearCacheConfiguration();
- if (args.getNearCacheSize() != 0)
- nearCfg.setNearEvictionPolicy(new LruEvictionPolicy(args.getNearCacheSize()));
+ int nearCacheSize = args.getNearCacheSize();
+
+ if (nearCacheSize != 0)
+ nearCfg.setNearEvictionPolicy(new LruEvictionPolicy(nearCacheSize));
cc.setNearConfiguration(nearCfg);
}
[09/28] ignite git commit: IGNITE-6824 Web Console: Upgraded Angular
from 1.5.x to 1.6.x.
Posted by sb...@apache.org.
IGNITE-6824 Web Console: Upgraded Angular from 1.5.x to 1.6.x.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/85cf9587
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/85cf9587
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/85cf9587
Branch: refs/heads/ignite-zk
Commit: 85cf95870945c317bc55862a9c66b566b3018f43
Parents: 8c343a1
Author: alexdel <ve...@yandex.ru>
Authored: Fri Nov 10 09:46:43 2017 +0700
Committer: Alexey Kuznetsov <ak...@apache.org>
Committed: Fri Nov 10 09:46:43 2017 +0700
----------------------------------------------------------------------
.../ui-ace-docker/ui-ace-docker.controller.js | 26 ++--
.../ui-ace-java/ui-ace-java.controller.js | 150 ++++++++++---------
.../ui-ace-pojos/ui-ace-pojos.controller.js | 148 +++++++++---------
.../ui-ace-pom/ui-ace-pom.controller.js | 26 ++--
.../ui-ace-sharp/ui-ace-sharp.controller.js | 12 +-
.../ui-ace-spring/ui-ace-spring.controller.js | 146 +++++++++---------
.../app/modules/dialog/dialog.controller.js | 26 ++--
.../app/modules/form/field/down.directive.js | 16 +-
.../app/modules/form/field/up.directive.js | 14 +-
.../app/modules/form/group/add.directive.js | 24 ++-
.../app/modules/form/group/tooltip.directive.js | 24 ++-
.../app/modules/navbar/userbar.directive.js | 32 ++--
modules/web-console/frontend/package.json | 17 +--
13 files changed, 341 insertions(+), 320 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-docker/ui-ace-docker.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-docker/ui-ace-docker.controller.js b/modules/web-console/frontend/app/directives/ui-ace-docker/ui-ace-docker.controller.js
index 8ebaae4..4f443ae 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-docker/ui-ace-docker.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-docker/ui-ace-docker.controller.js
@@ -18,20 +18,22 @@
export default ['$scope', 'IgniteVersion', 'IgniteDockerGenerator', function($scope, Version, docker) {
const ctrl = this;
- // Watchers definition.
- const clusterWatcher = () => {
- delete ctrl.data;
+ this.$onInit = () => {
+ // Watchers definition.
+ const clusterWatcher = () => {
+ delete ctrl.data;
- if (!$scope.cluster)
- return;
+ if (!$scope.cluster)
+ return;
- ctrl.data = docker.generate($scope.cluster, Version.currentSbj.getValue());
- };
+ ctrl.data = docker.generate($scope.cluster, Version.currentSbj.getValue());
+ };
- // Setup watchers.
- Version.currentSbj.subscribe({
- next: clusterWatcher
- });
+ // Setup watchers.
+ Version.currentSbj.subscribe({
+ next: clusterWatcher
+ });
- $scope.$watch('cluster', clusterWatcher);
+ $scope.$watch('cluster', clusterWatcher);
+ };
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-java/ui-ace-java.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-java/ui-ace-java.controller.js b/modules/web-console/frontend/app/directives/ui-ace-java/ui-ace-java.controller.js
index e50ac6c..22f7d18 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-java/ui-ace-java.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-java/ui-ace-java.controller.js
@@ -18,103 +18,105 @@
export default ['IgniteVersion', 'JavaTransformer', function(Version, java) {
const ctrl = this;
- delete ctrl.data;
+ this.$onInit = () => {
+ delete ctrl.data;
- const client = ctrl.client === 'true';
+ const client = ctrl.client === 'true';
- const available = Version.available.bind(Version);
+ const available = Version.available.bind(Version);
- // Setup generator.
- switch (ctrl.generator) {
- case 'igniteConfiguration':
- const clsName = client ? 'ClientConfigurationFactory' : 'ServerConfigurationFactory';
+ // Setup generator.
+ switch (ctrl.generator) {
+ case 'igniteConfiguration':
+ const clsName = client ? 'ClientConfigurationFactory' : 'ServerConfigurationFactory';
- ctrl.generate = (cluster) => java.cluster(cluster, Version.currentSbj.getValue(), 'config', clsName, client);
+ ctrl.generate = (cluster) => java.cluster(cluster, Version.currentSbj.getValue(), 'config', clsName, client);
- break;
- case 'clusterCaches':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterCaches':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- const cfg = java.generator.clusterGeneral(cluster, available);
+ const cfg = java.generator.clusterGeneral(cluster, available);
- java.generator.clusterCaches(cluster, clusterCaches, null, available, false, cfg);
+ java.generator.clusterCaches(cluster, clusterCaches, null, available, false, cfg);
- return java.toSection(cfg);
- };
+ return java.toSection(cfg);
+ };
- break;
- case 'cacheStore':
- case 'cacheQuery':
- ctrl.generate = (cache, domains) => {
- const cacheDomains = _.reduce(domains, (acc, domain) => {
- if (_.includes(cache.domains, domain.value))
- acc.push(domain.meta);
+ break;
+ case 'cacheStore':
+ case 'cacheQuery':
+ ctrl.generate = (cache, domains) => {
+ const cacheDomains = _.reduce(domains, (acc, domain) => {
+ if (_.includes(cache.domains, domain.value))
+ acc.push(domain.meta);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return java[ctrl.generator](cache, cacheDomains, available);
- };
+ return java[ctrl.generator](cache, cacheDomains, available);
+ };
- break;
- case 'cacheNodeFilter':
- ctrl.generate = (cache, igfss) => {
- const cacheIgfss = _.reduce(igfss, (acc, igfs) => {
- acc.push(igfs.igfs);
+ break;
+ case 'cacheNodeFilter':
+ ctrl.generate = (cache, igfss) => {
+ const cacheIgfss = _.reduce(igfss, (acc, igfs) => {
+ acc.push(igfs.igfs);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return java.cacheNodeFilter(cache, cacheIgfss);
- };
+ return java.cacheNodeFilter(cache, cacheIgfss);
+ };
- break;
- case 'clusterServiceConfiguration':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterServiceConfiguration':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return java.clusterServiceConfiguration(cluster.serviceConfigurations, clusterCaches);
- };
+ return java.clusterServiceConfiguration(cluster.serviceConfigurations, clusterCaches);
+ };
- break;
- case 'clusterCheckpoint':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterCheckpoint':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return java.clusterCheckpoint(cluster, clusterCaches);
- };
+ return java.clusterCheckpoint(cluster, clusterCaches);
+ };
- break;
- case 'igfss':
- ctrl.generate = (cluster, igfss) => {
- const clusterIgfss = _.reduce(igfss, (acc, igfs) => {
- if (_.includes(cluster.igfss, igfs.value))
- acc.push(igfs.igfs);
+ break;
+ case 'igfss':
+ ctrl.generate = (cluster, igfss) => {
+ const clusterIgfss = _.reduce(igfss, (acc, igfs) => {
+ if (_.includes(cluster.igfss, igfs.value))
+ acc.push(igfs.igfs);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return java.clusterIgfss(clusterIgfss, available);
- };
+ return java.clusterIgfss(clusterIgfss, available);
+ };
- break;
- default:
- ctrl.generate = (master) => java[ctrl.generator](master, available);
- }
+ break;
+ default:
+ ctrl.generate = (master) => java[ctrl.generator](master, available);
+ }
+ };
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-pojos/ui-ace-pojos.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-pojos/ui-ace-pojos.controller.js b/modules/web-console/frontend/app/directives/ui-ace-pojos/ui-ace-pojos.controller.js
index 61bf086..774d73e 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-pojos/ui-ace-pojos.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-pojos/ui-ace-pojos.controller.js
@@ -18,78 +18,80 @@
export default ['$scope', 'JavaTypes', 'JavaTransformer', function($scope, JavaTypes, generator) {
const ctrl = this;
- // Watchers definition.
- // Watcher clean instance data if instance to cluster caches was change
- const cleanPojos = () => {
- delete ctrl.class;
- delete ctrl.pojos;
- delete ctrl.classes;
+ this.$onInit = () => {
+ // Watchers definition.
+ // Watcher clean instance data if instance to cluster caches was change
+ const cleanPojos = () => {
+ delete ctrl.class;
+ delete ctrl.pojos;
+ delete ctrl.classes;
+ };
+
+ // Watcher update pojos when changes caches and checkers useConstructor and includeKeyFields
+ const updatePojos = () => {
+ delete ctrl.pojos;
+
+ if (_.isNil(ctrl.cluster) || _.isEmpty(ctrl.cluster.caches))
+ return;
+
+ ctrl.pojos = generator.pojos(ctrl.cluster.caches, ctrl.useConstructor, ctrl.includeKeyFields);
+ };
+
+ // Watcher update classes after
+ const updateClasses = (value) => {
+ delete ctrl.classes;
+
+ if (!value)
+ return;
+
+ const classes = ctrl.classes = [];
+
+ _.forEach(ctrl.pojos, (pojo) => {
+ if (_.nonNil(pojo.keyClass))
+ classes.push(pojo.keyType);
+
+ classes.push(pojo.valueType);
+ });
+ };
+
+ // Update pojos class.
+ const updateClass = (value) => {
+ if (_.isEmpty(value))
+ return;
+
+ const pojo = value[0];
+
+ ctrl.class = ctrl.class || (pojo.keyClass ? pojo.keyType : pojo.valueType);
+ };
+
+ // Update pojos data.
+ const updatePojosData = (value) => {
+ if (_.isNil(value))
+ return;
+
+ _.forEach(ctrl.pojos, (pojo) => {
+ if (pojo.keyType === ctrl.class) {
+ ctrl.data = pojo.keyClass;
+
+ return false;
+ }
+
+ if (pojo.valueType === ctrl.class) {
+ ctrl.data = pojo.valueClass;
+
+ return false;
+ }
+ });
+ };
+
+ // Setup watchers. Watchers order is important.
+ $scope.$watch('ctrl.cluster.caches', cleanPojos);
+ $scope.$watch('ctrl.cluster.caches', updatePojos);
+ $scope.$watch('ctrl.cluster.caches', updateClasses);
+ $scope.$watch('ctrl.useConstructor', updatePojos);
+ $scope.$watch('ctrl.includeKeyFields', updatePojos);
+ $scope.$watch('ctrl.pojos', updateClass);
+ $scope.$watch('ctrl.pojos', updatePojosData);
+ $scope.$watch('ctrl.class', updatePojosData);
};
-
- // Watcher update pojos when changes caches and checkers useConstructor and includeKeyFields
- const updatePojos = () => {
- delete ctrl.pojos;
-
- if (_.isNil(ctrl.cluster) || _.isEmpty(ctrl.cluster.caches))
- return;
-
- ctrl.pojos = generator.pojos(ctrl.cluster.caches, ctrl.useConstructor, ctrl.includeKeyFields);
- };
-
- // Watcher update classes after
- const updateClasses = (value) => {
- delete ctrl.classes;
-
- if (!value)
- return;
-
- const classes = ctrl.classes = [];
-
- _.forEach(ctrl.pojos, (pojo) => {
- if (_.nonNil(pojo.keyClass))
- classes.push(pojo.keyType);
-
- classes.push(pojo.valueType);
- });
- };
-
- // Update pojos class.
- const updateClass = (value) => {
- if (_.isEmpty(value))
- return;
-
- const pojo = value[0];
-
- ctrl.class = ctrl.class || (pojo.keyClass ? pojo.keyType : pojo.valueType);
- };
-
- // Update pojos data.
- const updatePojosData = (value) => {
- if (_.isNil(value))
- return;
-
- _.forEach(ctrl.pojos, (pojo) => {
- if (pojo.keyType === ctrl.class) {
- ctrl.data = pojo.keyClass;
-
- return false;
- }
-
- if (pojo.valueType === ctrl.class) {
- ctrl.data = pojo.valueClass;
-
- return false;
- }
- });
- };
-
- // Setup watchers. Watchers order is important.
- $scope.$watch('ctrl.cluster.caches', cleanPojos);
- $scope.$watch('ctrl.cluster.caches', updatePojos);
- $scope.$watch('ctrl.cluster.caches', updateClasses);
- $scope.$watch('ctrl.useConstructor', updatePojos);
- $scope.$watch('ctrl.includeKeyFields', updatePojos);
- $scope.$watch('ctrl.pojos', updateClass);
- $scope.$watch('ctrl.pojos', updatePojosData);
- $scope.$watch('ctrl.class', updatePojosData);
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-pom/ui-ace-pom.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-pom/ui-ace-pom.controller.js b/modules/web-console/frontend/app/directives/ui-ace-pom/ui-ace-pom.controller.js
index 0135eb3..2e421b2 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-pom/ui-ace-pom.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-pom/ui-ace-pom.controller.js
@@ -18,20 +18,22 @@
export default ['$scope', 'IgniteVersion', 'IgniteMavenGenerator', function($scope, Version, maven) {
const ctrl = this;
- // Watchers definition.
- const clusterWatcher = (value) => {
- delete ctrl.data;
+ this.$onInit = () => {
+ // Watchers definition.
+ const clusterWatcher = (value) => {
+ delete ctrl.data;
- if (!value)
- return;
+ if (!value)
+ return;
- ctrl.data = maven.generate($scope.cluster, Version.currentSbj.getValue());
- };
+ ctrl.data = maven.generate($scope.cluster, Version.currentSbj.getValue());
+ };
- // Setup watchers.
- Version.currentSbj.subscribe({
- next: clusterWatcher
- });
+ // Setup watchers.
+ Version.currentSbj.subscribe({
+ next: clusterWatcher
+ });
- $scope.$watch('cluster', clusterWatcher);
+ $scope.$watch('cluster', clusterWatcher);
+ };
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-sharp/ui-ace-sharp.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-sharp/ui-ace-sharp.controller.js b/modules/web-console/frontend/app/directives/ui-ace-sharp/ui-ace-sharp.controller.js
index e600773..e87caca 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-sharp/ui-ace-sharp.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-sharp/ui-ace-sharp.controller.js
@@ -21,12 +21,14 @@ const CLIENT_CFG = 'ClientConfigurationFactory';
export default ['$scope', 'IgniteSharpTransformer', function($scope, generator) {
const ctrl = this;
- delete ctrl.data;
+ this.$onInit = () => {
+ delete ctrl.data;
- // Set default generator
- ctrl.generator = (cluster) => {
- const type = $scope.cfg ? CLIENT_CFG : SERVER_CFG;
+ // Set default generator
+ ctrl.generator = (cluster) => {
+ const type = $scope.cfg ? CLIENT_CFG : SERVER_CFG;
- return generator.cluster(cluster, 'config', type, $scope.cfg);
+ return generator.cluster(cluster, 'config', type, $scope.cfg);
+ };
};
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/directives/ui-ace-spring/ui-ace-spring.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/directives/ui-ace-spring/ui-ace-spring.controller.js b/modules/web-console/frontend/app/directives/ui-ace-spring/ui-ace-spring.controller.js
index 17da1fd..7eccf6d 100644
--- a/modules/web-console/frontend/app/directives/ui-ace-spring/ui-ace-spring.controller.js
+++ b/modules/web-console/frontend/app/directives/ui-ace-spring/ui-ace-spring.controller.js
@@ -18,99 +18,101 @@
export default ['IgniteVersion', 'SpringTransformer', function(Version, spring) {
const ctrl = this;
- delete ctrl.data;
+ this.$onInit = () => {
+ delete ctrl.data;
- const available = Version.available.bind(Version);
+ const available = Version.available.bind(Version);
- // Setup generator.
- switch (ctrl.generator) {
- case 'igniteConfiguration':
- ctrl.generate = (cluster) => spring.cluster(cluster, Version.currentSbj.getValue(), ctrl.client === 'true');
+ // Setup generator.
+ switch (ctrl.generator) {
+ case 'igniteConfiguration':
+ ctrl.generate = (cluster) => spring.cluster(cluster, Version.currentSbj.getValue(), ctrl.client === 'true');
- break;
- case 'clusterCaches':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterCaches':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- const cfg = spring.generator.clusterGeneral(cluster, available);
+ const cfg = spring.generator.clusterGeneral(cluster, available);
- spring.generator.clusterCaches(cluster, clusterCaches, null, available, false, cfg);
+ spring.generator.clusterCaches(cluster, clusterCaches, null, available, false, cfg);
- return spring.toSection(cfg);
- };
+ return spring.toSection(cfg);
+ };
- break;
- case 'cacheStore':
- case 'cacheQuery':
- ctrl.generate = (cache, domains) => {
- const cacheDomains = _.reduce(domains, (acc, domain) => {
- if (_.includes(cache.domains, domain.value))
- acc.push(domain.meta);
+ break;
+ case 'cacheStore':
+ case 'cacheQuery':
+ ctrl.generate = (cache, domains) => {
+ const cacheDomains = _.reduce(domains, (acc, domain) => {
+ if (_.includes(cache.domains, domain.value))
+ acc.push(domain.meta);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return spring[ctrl.generator](cache, cacheDomains, available);
- };
+ return spring[ctrl.generator](cache, cacheDomains, available);
+ };
- break;
- case 'cacheNodeFilter':
- ctrl.generate = (cache, igfss) => {
- const cacheIgfss = _.reduce(igfss, (acc, igfs) => {
- acc.push(igfs.igfs);
+ break;
+ case 'cacheNodeFilter':
+ ctrl.generate = (cache, igfss) => {
+ const cacheIgfss = _.reduce(igfss, (acc, igfs) => {
+ acc.push(igfs.igfs);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return spring.cacheNodeFilter(cache, cacheIgfss);
- };
+ return spring.cacheNodeFilter(cache, cacheIgfss);
+ };
- break;
- case 'clusterServiceConfiguration':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterServiceConfiguration':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return spring.clusterServiceConfiguration(cluster.serviceConfigurations, clusterCaches);
- };
+ return spring.clusterServiceConfiguration(cluster.serviceConfigurations, clusterCaches);
+ };
- break;
- case 'clusterCheckpoint':
- ctrl.generate = (cluster, caches) => {
- const clusterCaches = _.reduce(caches, (acc, cache) => {
- if (_.includes(cluster.caches, cache.value))
- acc.push(cache.cache);
+ break;
+ case 'clusterCheckpoint':
+ ctrl.generate = (cluster, caches) => {
+ const clusterCaches = _.reduce(caches, (acc, cache) => {
+ if (_.includes(cluster.caches, cache.value))
+ acc.push(cache.cache);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return spring.clusterCheckpoint(cluster, clusterCaches);
- };
+ return spring.clusterCheckpoint(cluster, clusterCaches);
+ };
- break;
- case 'igfss':
- ctrl.generate = (cluster, igfss) => {
- const clusterIgfss = _.reduce(igfss, (acc, igfs) => {
- if (_.includes(cluster.igfss, igfs.value))
- acc.push(igfs.igfs);
+ break;
+ case 'igfss':
+ ctrl.generate = (cluster, igfss) => {
+ const clusterIgfss = _.reduce(igfss, (acc, igfs) => {
+ if (_.includes(cluster.igfss, igfs.value))
+ acc.push(igfs.igfs);
- return acc;
- }, []);
+ return acc;
+ }, []);
- return spring.clusterIgfss(clusterIgfss, available);
- };
+ return spring.clusterIgfss(clusterIgfss, available);
+ };
- break;
- default:
- ctrl.generate = (master) => spring[ctrl.generator](master, available);
- }
+ break;
+ default:
+ ctrl.generate = (master) => spring[ctrl.generator](master, available);
+ }
+ };
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/dialog/dialog.controller.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/dialog/dialog.controller.js b/modules/web-console/frontend/app/modules/dialog/dialog.controller.js
index 0256b84..a75ff1f 100644
--- a/modules/web-console/frontend/app/modules/dialog/dialog.controller.js
+++ b/modules/web-console/frontend/app/modules/dialog/dialog.controller.js
@@ -18,19 +18,21 @@
export default ['$rootScope', '$scope', 'IgniteDialog', function($root, $scope, IgniteDialog) {
const ctrl = this;
- const dialog = new IgniteDialog({
- scope: $scope
- });
+ this.$onInit = () => {
+ const dialog = new IgniteDialog({
+ scope: $scope
+ });
- ctrl.show = () => {
- dialog.$promise.then(dialog.show);
- };
+ ctrl.show = () => {
+ dialog.$promise.then(dialog.show);
+ };
- $scope.$watch(() => ctrl.title, () => {
- $scope.title = ctrl.title;
- });
+ $scope.$watch(() => ctrl.title, () => {
+ $scope.title = ctrl.title;
+ });
- $scope.$watch(() => ctrl.content, () => {
- $scope.content = ctrl.content;
- });
+ $scope.$watch(() => ctrl.content, () => {
+ $scope.content = ctrl.content;
+ });
+ };
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/form/field/down.directive.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/form/field/down.directive.js b/modules/web-console/frontend/app/modules/form/field/down.directive.js
index e9c2aa7..c957e97 100644
--- a/modules/web-console/frontend/app/modules/form/field/down.directive.js
+++ b/modules/web-console/frontend/app/modules/form/field/down.directive.js
@@ -17,14 +17,20 @@
export default ['igniteFormFieldDown', ['$tooltip', ($tooltip) => {
const controller = ['$element', function($element) {
- $tooltip($element, { title: 'Move item down' });
+ const ctrl = this;
- this.down = () => {
- const i = this.models.indexOf(this.model);
+ this.$onInit = () => {
+ $tooltip($element, { title: 'Move item down' });
- this.models.splice(i, 1);
- this.models.splice(i + 1, 0, this.model);
+ ctrl.down = () => {
+ const i = ctrl.models.indexOf(ctrl.model);
+
+ ctrl.models.splice(i, 1);
+ ctrl.models.splice(i + 1, 0, ctrl.model);
+ };
};
+
+
}];
return {
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/form/field/up.directive.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/form/field/up.directive.js b/modules/web-console/frontend/app/modules/form/field/up.directive.js
index c0140de..6f87180 100644
--- a/modules/web-console/frontend/app/modules/form/field/up.directive.js
+++ b/modules/web-console/frontend/app/modules/form/field/up.directive.js
@@ -17,13 +17,17 @@
export default ['igniteFormFieldUp', ['$tooltip', ($tooltip) => {
const controller = ['$element', function($element) {
- $tooltip($element, { title: 'Move item up' });
+ const ctrl = this;
- this.up = () => {
- const idx = this.models.indexOf(this.model);
+ this.$onInit = () => {
+ $tooltip($element, { title: 'Move item up' });
- this.models.splice(idx, 1);
- this.models.splice(idx - 1, 0, this.model);
+ this.up = () => {
+ const idx = ctrl.models.indexOf(ctrl.model);
+
+ ctrl.models.splice(idx, 1);
+ ctrl.models.splice(idx - 1, 0, ctrl.model);
+ };
};
}];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/form/group/add.directive.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/form/group/add.directive.js b/modules/web-console/frontend/app/modules/form/group/add.directive.js
index 7e9a50c..71070cc 100644
--- a/modules/web-console/frontend/app/modules/form/group/add.directive.js
+++ b/modules/web-console/frontend/app/modules/form/group/add.directive.js
@@ -18,23 +18,21 @@
const template = '<i class="group-legend-btn fa fa-plus"></i>';
export default ['igniteFormGroupAdd', ['$tooltip', ($tooltip) => {
- const link = ($scope, $element, $attrs, $ctrls, $transclude) => {
- const content = Array.prototype.slice
- .apply($transclude($scope))
- .reduce((html, el) => html += el.outerHTML || el.textContent || el, '');
-
- $tooltip($element, { title: content });
-
- $element.closest('.group').find('.group-legend').append($element);
- };
-
return {
restrict: 'E',
scope: {},
template,
- link,
+ link($scope, $el, $attr, $ctrl, $transclude) {
+ $transclude((clone) => {
+ const title = Array.from(clone)
+ .reduce((html, el) => html += el.outerHTML || el.textContent || el, '');
+ const legend = $el.closest('.group').find('.group-legend');
+
+ $tooltip($el, {title});
+ legend.append($el);
+ });
+ },
replace: true,
- transclude: true,
- require: ['^form']
+ transclude: true
};
}]];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/form/group/tooltip.directive.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/form/group/tooltip.directive.js b/modules/web-console/frontend/app/modules/form/group/tooltip.directive.js
index 6027765..4190dee 100644
--- a/modules/web-console/frontend/app/modules/form/group/tooltip.directive.js
+++ b/modules/web-console/frontend/app/modules/form/group/tooltip.directive.js
@@ -18,23 +18,21 @@
const template = '<i class="group-legend-btn icon-help"></i>';
export default ['igniteFormGroupTooltip', ['$tooltip', ($tooltip) => {
- const link = ($scope, $element, $attrs, $ctrls, $transclude) => {
- const content = Array.prototype.slice
- .apply($transclude($scope))
- .reduce((html, el) => html += el.outerHTML || el.textContent || el, '');
-
- $tooltip($element, { title: content });
-
- $element.closest('.group').find('.group-legend').append($element);
- };
-
return {
restrict: 'E',
scope: {},
template,
- link,
+ link($scope, $el, $attr, $ctrl, $transclude) {
+ $transclude((clone) => {
+ const title = Array.from(clone)
+ .reduce((html, el) => html += el.outerHTML || el.textContent || el, '');
+ const legend = $el.closest('.group').find('.group-legend');
+
+ $tooltip($el, {title});
+ legend.append($el);
+ });
+ },
replace: true,
- transclude: true,
- require: ['^form']
+ transclude: true
};
}]];
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/app/modules/navbar/userbar.directive.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/modules/navbar/userbar.directive.js b/modules/web-console/frontend/app/modules/navbar/userbar.directive.js
index 279314f..51f46d4 100644
--- a/modules/web-console/frontend/app/modules/navbar/userbar.directive.js
+++ b/modules/web-console/frontend/app/modules/navbar/userbar.directive.js
@@ -21,27 +21,29 @@ export default ['igniteUserbar', [function() {
controller: ['$rootScope', 'IgniteUserbar', 'AclService', function($root, IgniteUserbar, AclService) {
const ctrl = this;
- ctrl.items = [
- {text: 'Profile', sref: 'base.settings.profile'},
- {text: 'Getting started', click: 'gettingStarted.tryShow(true)'}
- ];
+ this.$onInit = () => {
+ ctrl.items = [
+ {text: 'Profile', sref: 'base.settings.profile'},
+ {text: 'Getting started', click: 'gettingStarted.tryShow(true)'}
+ ];
- const _rebuildSettings = () => {
- ctrl.items.splice(2);
+ const _rebuildSettings = () => {
+ ctrl.items.splice(2);
- if (AclService.can('admin_page'))
- ctrl.items.push({text: 'Admin panel', sref: 'base.settings.admin'});
+ if (AclService.can('admin_page'))
+ ctrl.items.push({text: 'Admin panel', sref: 'base.settings.admin'});
- ctrl.items.push(...IgniteUserbar);
+ ctrl.items.push(...IgniteUserbar);
- if (AclService.can('logout'))
- ctrl.items.push({text: 'Log out', sref: 'logout'});
- };
+ if (AclService.can('logout'))
+ ctrl.items.push({text: 'Log out', sref: 'logout'});
+ };
- if ($root.user)
- _rebuildSettings(null, $root.user);
+ if ($root.user)
+ _rebuildSettings(null, $root.user);
- $root.$on('user', _rebuildSettings);
+ $root.$on('user', _rebuildSettings);
+ };
}],
controllerAs: 'userbar'
};
http://git-wip-us.apache.org/repos/asf/ignite/blob/85cf9587/modules/web-console/frontend/package.json
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/package.json b/modules/web-console/frontend/package.json
index 38208df..49e69b4 100644
--- a/modules/web-console/frontend/package.json
+++ b/modules/web-console/frontend/package.json
@@ -32,23 +32,22 @@
"win32"
],
"dependencies": {
- "@uirouter/angularjs": "1.0.5",
- "angular": "1.5.11",
- "angular-acl": "0.1.8",
- "angular-animate": "1.5.11",
- "angular-aria": "1.5.11",
- "angular-cookies": "1.5.11",
+ "@uirouter/angularjs": "1.0.10",
+ "angular": "1.6.6",
+ "angular-acl": "0.1.10",
+ "angular-animate": "1.6.6",
+ "angular-aria": "1.6.6",
+ "angular-cookies": "1.6.6",
"angular-drag-and-drop-lists": "1.4.0",
"angular-gridster": "0.13.14",
"angular-motion": "0.4.4",
"angular-nvd3": "1.0.9",
"angular-retina": "0.4.0",
- "angular-sanitize": "1.5.11",
+ "angular-sanitize": "1.6.6",
"angular-smart-table": "2.1.8",
"angular-socket-io": "0.7.0",
"angular-strap": "2.3.12",
- "angular-touch": "1.5.11",
- "angular-translate": "2.15.2",
+ "angular-translate": "2.16.0",
"angular-tree-control": "0.2.28",
"angular-ui-grid": "4.0.7",
"babel-core": "6.25.0",
[03/28] ignite git commit: IGNITE-6480: SQL: implemented base
parser/lexer and CREATE INDEX command support. This closes #3001.
Posted by sb...@apache.org.
IGNITE-6480: SQL: implemented base parser/lexer and CREATE INDEX command support. This closes #3001.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/145c59dd
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/145c59dd
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/145c59dd
Branch: refs/heads/ignite-zk
Commit: 145c59dd79fd796d6f5590d3e1f822d6a305db41
Parents: ca6a009
Author: devozerov <vo...@gridgain.com>
Authored: Thu Nov 9 11:01:05 2017 +0300
Committer: devozerov <vo...@gridgain.com>
Committed: Thu Nov 9 11:01:05 2017 +0300
----------------------------------------------------------------------
.../apache/ignite/internal/sql/SqlKeyword.java | 237 ++++++++++++
.../apache/ignite/internal/sql/SqlLexer.java | 213 +++++++++++
.../internal/sql/SqlLexerLookAheadToken.java | 75 ++++
.../ignite/internal/sql/SqlLexerToken.java | 48 +++
.../ignite/internal/sql/SqlLexerTokenType.java | 112 ++++++
.../ignite/internal/sql/SqlParseException.java | 99 +++++
.../apache/ignite/internal/sql/SqlParser.java | 174 +++++++++
.../ignite/internal/sql/SqlParserUtils.java | 363 +++++++++++++++++++
.../ignite/internal/sql/command/SqlCommand.java | 43 +++
.../sql/command/SqlCreateIndexCommand.java | 200 ++++++++++
.../internal/sql/command/SqlIndexColumn.java | 61 ++++
.../internal/sql/command/SqlQualifiedName.java | 70 ++++
.../ignite/internal/sql/SqlParserSelfTest.java | 198 ++++++++++
.../processors/query/h2/IgniteH2Indexing.java | 66 +++-
.../query/h2/ddl/DdlStatementsProcessor.java | 80 +++-
.../IgniteCacheQuerySelfTestSuite.java | 3 +
16 files changed, 2037 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlKeyword.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlKeyword.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlKeyword.java
new file mode 100644
index 0000000..ac826cc
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlKeyword.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.typedef.F;
+
+import java.lang.reflect.Field;
+import java.util.HashSet;
+
+/**
+ * SQL keyword constants.
+ */
+public class SqlKeyword {
+ /** Keyword: ASC. */
+ public static final String ASC = "ASC";
+
+ /** Keyword: BIGINT. */
+ public static final String BIGINT = "BIGINT";
+
+ /** Keyword: BIT. */
+ public static final String BIT = "BIT";
+
+ /** Keyword: BOOL. */
+ public static final String BOOL = "BOOL";
+
+ /** Keyword: BOOLEAN. */
+ public static final String BOOLEAN = "BOOLEAN";
+
+ /** Keyword: CASCADE. */
+ public static final String CASCADE = "CASCADE";
+
+ /** Keyword: CHAR. */
+ public static final String CHAR = "CHAR";
+
+ /** Keyword: CHARACTER. */
+ public static final String CHARACTER = "CHARACTER";
+
+ /** Keyword: CREATE. */
+ public static final String CREATE = "CREATE";
+
+ /** Keyword: DATE. */
+ public static final String DATE = "DATE";
+
+ /** Keyword: DATETIME. */
+ public static final String DATETIME = "DATETIME";
+
+ /** Keyword: DEC. */
+ public static final String DEC = "DEC";
+
+ /** Keyword: DECIMAL. */
+ public static final String DECIMAL = "DECIMAL";
+
+ /** Keyword: DESC. */
+ public static final String DESC = "DESC";
+
+ /** Keyword: DOUBLE. */
+ public static final String DOUBLE = "DOUBLE";
+
+ /** Keyword: DROP. */
+ public static final String DROP = "DROP";
+
+ /** Keyword: EXISTS. */
+ public static final String EXISTS = "EXISTS";
+
+ /** Keyword: FLOAT. */
+ public static final String FLOAT = "FLOAT";
+
+ /** Keyword: FLOAT4. */
+ public static final String FLOAT4 = "FLOAT4";
+
+ /** Keyword: FLOAT8. */
+ public static final String FLOAT8 = "FLOAT8";
+
+ /** Keyword: FULLTEXT. */
+ public static final String FULLTEXT = "FULLTEXT";
+
+ /** Keyword: HASH. */
+ public static final String HASH = "HASH";
+
+ /** Keyword: IF. */
+ public static final String IF = "IF";
+
+ /** Keyword: INDEX. */
+ public static final String INDEX = "INDEX";
+
+ /** Keyword: INT. */
+ public static final String INT = "INT";
+
+ /** Keyword: INT2. */
+ public static final String INT2 = "INT2";
+
+ /** Keyword: INT4. */
+ public static final String INT4 = "INT4";
+
+ /** Keyword: INT8. */
+ public static final String INT8 = "INT8";
+
+ /** Keyword: INTEGER. */
+ public static final String INTEGER = "INTEGER";
+
+ /** Keyword: KEY. */
+ public static final String KEY = "KEY";
+
+ /** Keyword: LONGVARCHAR. */
+ public static final String LONGVARCHAR = "LONGVARCHAR";
+
+ /** Keyword: MEDIUMINT. */
+ public static final String MEDIUMINT = "MEDIUMINT";
+
+ /** Keyword: NCHAR. */
+ public static final String NCHAR = "NCHAR";
+
+ /** Keyword: NOT. */
+ public static final String NOT = "NOT";
+
+ /** Keyword: NUMBER. */
+ public static final String NUMBER = "NUMBER";
+
+ /** Keyword: NUMERIC. */
+ public static final String NUMERIC = "NUMERIC";
+
+ /** Keyword: NVARCHAR. */
+ public static final String NVARCHAR = "NVARCHAR";
+
+ /** Keyword: NVARCHAR2. */
+ public static final String NVARCHAR2 = "NVARCHAR2";
+
+ /** Keyword: ON. */
+ public static final String ON = "ON";
+
+ /** Keyword: PRECISION. */
+ public static final String PRECISION = "PRECISION";
+
+ /** Keyword: PRIMARY. */
+ public static final String PRIMARY = "PRIMARY";
+
+ /** Keyword: REAL. */
+ public static final String REAL = "REAL";
+
+ /** Keyword: RESTRICT. */
+ public static final String RESTRICT = "RESTRICT";
+
+ /** Keyword: SIGNED. */
+ public static final String SIGNED = "SIGNED";
+
+ /** Keyword: SMALLDATETIME. */
+ public static final String SMALLDATETIME = "SMALLDATETIME";
+
+ /** Keyword: SMALLINT. */
+ public static final String SMALLINT = "SMALLINT";
+
+ /** Keyword: SPATIAL. */
+ public static final String SPATIAL = "SPATIAL";
+
+ /** Keyword: TABLE. */
+ public static final String TABLE = "TABLE";
+
+ /** Keyword: TIME. */
+ public static final String TIME = "TIME";
+
+ /** Keyword: TIMESTAMP. */
+ public static final String TIMESTAMP = "TIMESTAMP";
+
+ /** Keyword: TINYINT. */
+ public static final String TINYINT = "TINYINT";
+
+ /** Keyword: UNIQUE. */
+ public static final String UNIQUE = "UNIQUE";
+
+ /** Keyword: UUID. */
+ public static final String UUID = "UUID";
+
+ /** Keyword: VARCHAR. */
+ public static final String VARCHAR = "VARCHAR";
+
+ /** Keyword: VARCHAR2. */
+ public static final String VARCHAR2 = "VARCHAR2";
+
+ /** Keyword: VARCHAR_CASESENSITIVE. */
+ public static final String VARCHAR_CASESENSITIVE = "VARCHAR_CASESENSITIVE";
+
+ /** Keyword: YEAR. */
+ public static final String YEAR = "YEAR";
+
+ /** All keywords. */
+ private static final HashSet<String> KEYWORDS;
+
+ static {
+ KEYWORDS = new HashSet<>();
+
+ try {
+ for (Field field : SqlKeyword.class.getDeclaredFields()) {
+ if (F.eq(String.class, field.getType())) {
+ String val = (String) field.get(null);
+
+ KEYWORDS.add(val);
+ }
+ }
+ }
+ catch (ReflectiveOperationException e) {
+ throw new IgniteException("Failed to initialize keywords collection.", e);
+ }
+ }
+
+ /**
+ * Check if string is a keyword.
+ *
+ * @param str String.
+ * @return {@code True} if it is a keyword.
+ */
+ public static boolean isKeyword(String str) {
+ return KEYWORDS.contains(str);
+ }
+
+ /**
+ * Private constructor.
+ */
+ private SqlKeyword() {
+ // No-op.
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
new file mode 100644
index 0000000..a8009b7
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexer.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode;
+
+/**
+ * SQL lexer.
+ */
+public class SqlLexer implements SqlLexerToken {
+ /** Original input. */
+ private final String sql;
+
+ /** Input characters. */
+ private final char[] inputChars;
+
+ /** Current position. */
+ private int pos;
+
+ /** Current token start. */
+ private int tokenPos;
+
+ /** Current token. */
+ private String token;
+
+ /** Token type. */
+ private SqlLexerTokenType tokenTyp;
+
+ /**
+ * Constructor.
+ *
+ * @param sql Input.
+ */
+ public SqlLexer(String sql) {
+ assert sql != null;
+
+ this.sql = sql;
+
+ // Additional slot for look-ahead convenience.
+ inputChars = new char[sql.length() + 1];
+
+ for (int i = 0; i < sql.length(); i++)
+ inputChars[i] = sql.charAt(i);
+ }
+
+ /**
+ * Get next token without lexer state change.
+ *
+ * @return Next token.
+ */
+ public SqlLexerToken lookAhead() {
+ int pos0 = pos;
+ String token0 = token;
+ int tokenPos0 = tokenPos;
+ SqlLexerTokenType tokenTyp0 = tokenTyp;
+
+ try {
+ if (shift())
+ return new SqlLexerLookAheadToken(sql, token, tokenPos, tokenTyp);
+ else
+ return new SqlLexerLookAheadToken(sql, null, tokenPos, SqlLexerTokenType.EOF);
+ }
+ finally {
+ pos = pos0;
+ token = token0;
+ tokenPos = tokenPos0;
+ tokenTyp = tokenTyp0;
+ }
+ }
+
+ /**
+ * Shift lexer to the next position.
+ *
+ * @return {@code True} if next token was found, {@code false} in case of end-of-file.
+ */
+ public boolean shift() {
+ while (!eod()) {
+ int tokenStartPos0 = pos;
+
+ String token0 = null;
+ SqlLexerTokenType tokenTyp0 = null;
+
+ char c = inputChars[pos++];
+
+ switch (c) {
+ case '-':
+ if (inputChars[pos] == '-') {
+ // Full-line comment.
+ pos++;
+
+ while (!eod()) {
+ char c1 = inputChars[pos];
+
+ if (c1 == '\n' || c1 == '\r')
+ break;
+
+ pos++;
+ }
+ }
+ else {
+ // Minus.
+ token0 = "-";
+ tokenTyp0 = SqlLexerTokenType.MINUS;
+ }
+
+ break;
+
+ case '\"':
+ while (true) {
+ if (eod()) {
+ throw new SqlParseException(sql, tokenStartPos0, IgniteQueryErrorCode.PARSING,
+ "Unclosed quoted identifier.");
+ }
+
+ char c1 = inputChars[pos];
+
+ pos++;
+
+ if (c1 == '\"')
+ break;
+ }
+
+ token0 = sql.substring(tokenStartPos0 + 1, pos - 1);
+ tokenTyp0 = SqlLexerTokenType.QUOTED;
+
+ break;
+
+ case '.':
+ case ',':
+ case ';':
+ case '(':
+ case ')':
+ token0 = Character.toString(c);
+ tokenTyp0 = SqlLexerTokenType.forChar(c);
+
+ break;
+
+ default:
+ if (c <= ' ' || Character.isSpaceChar(c))
+ continue;
+
+ while (!eod()) {
+ char c1 = inputChars[pos];
+
+ if (!Character.isJavaIdentifierPart(c1))
+ break;
+
+ pos++;
+ }
+
+ token0 = sql.substring(tokenStartPos0, pos).toUpperCase();
+ tokenTyp0 = SqlLexerTokenType.DEFAULT;
+ }
+
+ if (tokenTyp0 != null) {
+ token = token0;
+ tokenPos = tokenStartPos0;
+ tokenTyp = tokenTyp0;
+
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /** {@inheritDoc} */
+ public String sql() {
+ return sql;
+ }
+
+ /** {@inheritDoc} */
+ public String token() {
+ return token;
+ }
+
+ /** {@inheritDoc} */
+ public char tokenFirstChar() {
+ return token.charAt(0);
+ }
+
+ /** {@inheritDoc} */
+ public int tokenPosition() {
+ return tokenPos;
+ }
+
+ /** {@inheritDoc} */
+ public SqlLexerTokenType tokenType() {
+ return tokenTyp;
+ }
+
+ /**
+ * @return {@code True} if end of data is reached.
+ */
+ private boolean eod() {
+ return pos == inputChars.length - 1;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerLookAheadToken.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerLookAheadToken.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerLookAheadToken.java
new file mode 100644
index 0000000..e697473
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerLookAheadToken.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+/**
+ * Plain immutable look-ahead parser token.
+ */
+public class SqlLexerLookAheadToken implements SqlLexerToken {
+ /** SQL. */
+ private final String sql;
+
+ /** Token. */
+ private final String token;
+
+ /** Token position. */
+ private final int tokenPos;
+
+ /** Token type. */
+ private final SqlLexerTokenType tokenTyp;
+
+ /**
+ * Constructor.
+ *
+ * @param sql Original SQL.
+ * @param token Token.
+ * @param tokenPos Token position.
+ * @param tokenTyp Token type.
+ */
+ public SqlLexerLookAheadToken(String sql, String token, int tokenPos, SqlLexerTokenType tokenTyp) {
+ this.sql = sql;
+ this.token = token;
+ this.tokenPos = tokenPos;
+ this.tokenTyp = tokenTyp;
+ }
+
+ /** {@inheritDoc} */
+ public String sql() {
+ return sql;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String token() {
+ return token;
+ }
+
+ /** {@inheritDoc} */
+ @Override public char tokenFirstChar() {
+ return token.charAt(0);
+ }
+
+ /** {@inheritDoc} */
+ @Override public int tokenPosition() {
+ return tokenPos;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SqlLexerTokenType tokenType() {
+ return tokenTyp;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerToken.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerToken.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerToken.java
new file mode 100644
index 0000000..a172635
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerToken.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+/**
+ * SQL parser token interface.
+ */
+public interface SqlLexerToken {
+ /**
+ * @return Original SQL.
+ */
+ public String sql();
+
+ /**
+ * @return Current token.
+ */
+ public String token();
+
+ /**
+ * @return First character of the current token.
+ */
+ public char tokenFirstChar();
+
+ /**
+ * @return Current token start position.
+ */
+ public int tokenPosition();
+
+ /**
+ * @return Token type.
+ */
+ public SqlLexerTokenType tokenType();
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerTokenType.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerTokenType.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerTokenType.java
new file mode 100644
index 0000000..693832b
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlLexerTokenType.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import java.util.HashMap;
+
+/**
+ * Lexer token type.
+ */
+public enum SqlLexerTokenType {
+ /** Standard word. */
+ DEFAULT,
+
+ /** Quoted phrase. */
+ QUOTED,
+
+ /** Minus sign. */
+ MINUS('-'),
+
+ /** Dot. */
+ DOT('.'),
+
+ /** Comma. */
+ COMMA(','),
+
+ /** Parenthesis: left. */
+ PARENTHESIS_LEFT('('),
+
+ /** Parenthesis: right. */
+ PARENTHESIS_RIGHT(')'),
+
+ /** Semicolon. */
+ SEMICOLON(';'),
+
+ /** End of string. */
+ EOF;
+
+ /** Mapping from character to type. */
+ private static final HashMap<Character, SqlLexerTokenType> CHAR_TO_TYP = new HashMap<>();
+
+ /** Character. */
+ private final Character c;
+
+ /** Character as string. */
+ private final String str;
+
+ static {
+ for (SqlLexerTokenType typ : SqlLexerTokenType.values()) {
+ Character c = typ.asChar();
+
+ if (c != null)
+ CHAR_TO_TYP.put(c, typ);
+ }
+ }
+
+ /**
+ * Get token type for character.
+ *
+ * @param c Character.
+ * @return Type.
+ */
+ public static SqlLexerTokenType forChar(char c) {
+ return CHAR_TO_TYP.get(c);
+ }
+
+ /**
+ * Constructor.
+ */
+ SqlLexerTokenType() {
+ this(null);
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param c Corresponding character.
+ */
+ SqlLexerTokenType(Character c) {
+ this.c = c;
+
+ str = c != null ? c.toString() : null;
+ }
+
+ /**
+ * @return Character.
+ */
+ public Character asChar() {
+ return c;
+ }
+
+ /**
+ * @return Character as string.
+ */
+ public String asString() {
+ return str;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParseException.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParseException.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParseException.java
new file mode 100644
index 0000000..96d385d
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParseException.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Parse exception.
+ */
+public class SqlParseException extends IgniteException {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** SQL command. */
+ private final String sql;
+
+ /** Position. */
+ private final int pos;
+
+ /** Error code. */
+ private final int code;
+
+ /**
+ * Constructor.
+ *
+ * @param sql SQL command.
+ * @param pos Position.
+ * @param code Error code (parsing, unsupported operation, etc.).
+ * @param msg Message.
+ */
+ public SqlParseException(String sql, int pos, int code, String msg) {
+ super(prepareMessage(sql, pos, msg));
+
+ this.sql = sql;
+ this.pos = pos;
+ this.code = code;
+ }
+
+ /**
+ * Prepare message.
+ *
+ * @param sql Original SQL.
+ * @param pos Position.
+ * @param msg Message.
+ * @return Prepared message.
+ */
+ private static String prepareMessage(String sql, int pos, String msg) {
+ String sql0;
+
+ if (pos == sql.length())
+ sql0 = sql + "[*]";
+ else
+ sql0 = sql.substring(0, pos) + "[*]" + sql.substring(pos);
+
+ return "Failed to parse SQL statement \"" + sql0 + "\": " + msg;
+ }
+
+ /**
+ * @return SQL command.
+ */
+ public String sql() {
+ return sql;
+ }
+
+ /**
+ * @return Position.
+ */
+ public int position() {
+ return pos;
+ }
+
+ /**
+ * @return Error code.
+ */
+ public int code() {
+ return code;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SqlParseException.class, this, "msg", getMessage());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
new file mode 100644
index 0000000..9e0eee0
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParser.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.sql.command.SqlCommand;
+import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.jetbrains.annotations.Nullable;
+
+import static org.apache.ignite.internal.sql.SqlKeyword.CREATE;
+import static org.apache.ignite.internal.sql.SqlKeyword.DROP;
+import static org.apache.ignite.internal.sql.SqlKeyword.HASH;
+import static org.apache.ignite.internal.sql.SqlKeyword.INDEX;
+import static org.apache.ignite.internal.sql.SqlKeyword.PRIMARY;
+import static org.apache.ignite.internal.sql.SqlKeyword.SPATIAL;
+import static org.apache.ignite.internal.sql.SqlKeyword.TABLE;
+import static org.apache.ignite.internal.sql.SqlKeyword.UNIQUE;
+import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnexpectedToken;
+import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnsupported;
+import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnsupportedIfMatchesKeyword;
+import static org.apache.ignite.internal.sql.SqlParserUtils.matchesKeyword;
+
+/**
+ * SQL parser.
+ */
+public class SqlParser {
+ /** Scheme name. */
+ private final String schemaName;
+
+ /** Lexer. */
+ private final SqlLexer lex;
+
+ /**
+ * Constructor.
+ *
+ * @param schemaName Schema name.
+ * @param sql Original SQL.
+ */
+ public SqlParser(@Nullable String schemaName, String sql) {
+ this.schemaName = schemaName;
+
+ lex = new SqlLexer(sql);
+ }
+
+ /**
+ * Get next command.
+ *
+ * @return Command or {@code null} if end of script is reached.
+ */
+ public SqlCommand nextCommand() {
+ SqlCommand cmd = nextCommand0();
+
+ if (cmd != null) {
+ if (cmd.schemaName() == null)
+ cmd.schemaName(schemaName);
+ }
+
+ return cmd;
+ }
+
+ /**
+ * Get next command.
+ *
+ * @return Command or {@code null} if end of script is reached.
+ */
+ private SqlCommand nextCommand0() {
+ while (true) {
+ if (!lex.shift())
+ return null;
+
+ switch (lex.tokenType()) {
+ case SEMICOLON:
+ // Empty command, skip.
+ continue;
+
+ case DEFAULT:
+ SqlCommand cmd = null;
+
+ switch (lex.token()) {
+ case CREATE:
+ cmd = processCreate();
+
+ break;
+
+ case DROP:
+ cmd = processDrop();
+
+ break;
+ }
+
+ if (cmd != null) {
+ // If there is something behind the command, this is a syntax error.
+ if (lex.shift() && lex.tokenType() != SqlLexerTokenType.SEMICOLON)
+ throw errorUnexpectedToken(lex);
+
+ return cmd;
+ }
+ else
+ throw errorUnexpectedToken(lex, CREATE, DROP);
+
+ case QUOTED:
+ case MINUS:
+ case DOT:
+ case COMMA:
+ case PARENTHESIS_LEFT:
+ case PARENTHESIS_RIGHT:
+ default:
+ throw errorUnexpectedToken(lex);
+ }
+ }
+ }
+
+ /**
+ * Process CREATE keyword.
+ *
+ * @return Command.
+ */
+ private SqlCommand processCreate() {
+ if (lex.shift() && lex.tokenType() == SqlLexerTokenType.DEFAULT) {
+ SqlCommand cmd = null;
+
+ switch (lex.token()) {
+ case INDEX:
+ cmd = new SqlCreateIndexCommand();
+
+ break;
+
+ case TABLE:
+ throw errorUnsupported(lex);
+
+ case SPATIAL:
+ if (lex.shift() && matchesKeyword(lex, INDEX))
+ cmd = new SqlCreateIndexCommand().spatial(true);
+ else
+ throw errorUnexpectedToken(lex, INDEX);
+
+ break;
+ }
+
+ if (cmd != null)
+ return cmd.parse(lex);
+
+ errorUnsupportedIfMatchesKeyword(lex, HASH, PRIMARY, UNIQUE);
+ }
+
+ throw errorUnexpectedToken(lex, INDEX, TABLE, SPATIAL);
+ }
+
+ /**
+ * Process DROP keyword.
+ *
+ * @return Command.
+ */
+ private SqlCommand processDrop() {
+ if (lex.shift() && lex.tokenType() == SqlLexerTokenType.DEFAULT)
+ throw errorUnsupported(lex);
+
+ throw errorUnexpectedToken(lex, INDEX, TABLE);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
new file mode 100644
index 0000000..cfe4b6f
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/SqlParserUtils.java
@@ -0,0 +1,363 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode;
+import org.apache.ignite.internal.sql.command.SqlQualifiedName;
+import org.apache.ignite.internal.util.typedef.F;
+
+import static org.apache.ignite.internal.sql.SqlKeyword.EXISTS;
+import static org.apache.ignite.internal.sql.SqlKeyword.IF;
+import static org.apache.ignite.internal.sql.SqlKeyword.NOT;
+
+/**
+ * Parser utility methods.
+ */
+public class SqlParserUtils {
+ /**
+ * Parse IF EXISTS statement.
+ *
+ * @param lex Lexer.
+ * @return {@code True} if statement is found.
+ */
+ public static boolean parseIfExists(SqlLexer lex) {
+ SqlLexerToken token = lex.lookAhead();
+
+ if (matchesKeyword(token, IF)) {
+ lex.shift();
+
+ skipIfMatchesKeyword(lex, EXISTS);
+
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Parse IF NOT EXISTS statement.
+ *
+ * @param lex Lexer.
+ * @return {@code True} if statement is found.
+ */
+ public static boolean parseIfNotExists(SqlLexer lex) {
+ SqlLexerToken token = lex.lookAhead();
+
+ if (matchesKeyword(token, IF)) {
+ lex.shift();
+
+ skipIfMatchesKeyword(lex, NOT);
+ skipIfMatchesKeyword(lex, EXISTS);
+
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+     * Skip comma or right parenthesis.
+ *
+ * @param lex Lexer.
+ * @return {@code True} if right parenthesis is found.
+ */
+ public static boolean skipCommaOrRightParenthesis(SqlLexer lex) {
+ if (lex.shift()) {
+ switch (lex.tokenType()) {
+ case COMMA:
+ return false;
+
+ case PARENTHESIS_RIGHT:
+ return true;
+ }
+ }
+
+ throw errorUnexpectedToken(lex, ",", ")");
+ }
+
+ /**
+ * Parse integer value.
+ *
+ * @param lex Lexer.
+ * @return Integer value.
+ */
+ public static int parseInt(SqlLexer lex) {
+ if (lex.shift() && lex.tokenType() == SqlLexerTokenType.DEFAULT) {
+ try {
+ return Integer.parseInt(lex.token());
+ }
+ catch (NumberFormatException e) {
+ // No-op.
+ }
+ }
+
+ throw errorUnexpectedToken(lex, "[number]");
+ }
+
+ /**
+     * Parse an identifier token.
+ *
+ * @param lex Lexer.
+ * @param additionalExpTokens Additional expected tokens in case of error.
+ * @return Name.
+ */
+ public static String parseIdentifier(SqlLexer lex, String... additionalExpTokens) {
+ if (lex.shift() && isVaildIdentifier(lex))
+ return lex.token();
+
+ throw errorUnexpectedToken(lex, "[identifier]", additionalExpTokens);
+ }
+
+ /**
+ * Process qualified name.
+ *
+ * @param lex Lexer.
+ * @param additionalExpTokens Additional expected tokens in case of error.
+ * @return Qualified name.
+ */
+ public static SqlQualifiedName parseQualifiedIdentifier(SqlLexer lex, String... additionalExpTokens) {
+ if (lex.shift() && isVaildIdentifier(lex)) {
+ SqlQualifiedName res = new SqlQualifiedName();
+
+ String first = lex.token();
+
+ SqlLexerToken nextToken = lex.lookAhead();
+
+ if (nextToken.tokenType() == SqlLexerTokenType.DOT) {
+ lex.shift();
+
+ String second = parseIdentifier(lex);
+
+ return res.schemaName(first).name(second);
+ }
+ else
+ return res.name(first);
+ }
+
+ throw errorUnexpectedToken(lex, "[qualified identifier]", additionalExpTokens);
+ }
+
+ /**
+ * Check if token is identifier.
+ *
+ * @param token Token.
+ * @return {@code True} if we are standing on possible identifier.
+ */
+ public static boolean isVaildIdentifier(SqlLexerToken token) {
+ switch (token.tokenType()) {
+ case DEFAULT:
+ char c = token.tokenFirstChar();
+
+ if ((c >= 'A' && c <= 'Z') || c == '_') {
+ if (SqlKeyword.isKeyword(token.token()))
+ throw errorUnexpectedToken(token, "[identifier]");
+
+ return true;
+ }
+
+ throw error(token, "Illegal identifier name: " + token.token());
+
+ case QUOTED:
+ return true;
+
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Check if current lexer token matches expected.
+ *
+     * @param token Token.
+ * @param expKeyword Expected keyword.
+ * @return {@code True} if matches.
+ */
+ public static boolean matchesKeyword(SqlLexerToken token, String expKeyword) {
+ return token.tokenType() == SqlLexerTokenType.DEFAULT && expKeyword.equals(token.token());
+ }
+
+ /**
+     * Skip token if it matches the expected keyword, otherwise throw a parse exception.
+ *
+ * @param lex Lexer.
+ * @param expKeyword Expected keyword.
+ */
+ public static void skipIfMatchesKeyword(SqlLexer lex, String expKeyword) {
+ if (lex.shift() && matchesKeyword(lex, expKeyword))
+ return;
+
+ throw errorUnexpectedToken(lex, expKeyword);
+ }
+
+ /**
+     * Skip next token if it matches the expected type, otherwise throw a parse exception.
+ *
+ * @param lex Lexer.
+ * @param tokenTyp Expected token type.
+ */
+ public static void skipIfMatches(SqlLexer lex, SqlLexerTokenType tokenTyp) {
+ if (lex.shift() && F.eq(lex.tokenType(), tokenTyp))
+ return;
+
+ throw errorUnexpectedToken(lex, tokenTyp.asString());
+ }
+
+ /**
+ * Create parse exception referring to current lexer position.
+ *
+ * @param token Token.
+ * @param msg Message.
+ * @return Exception.
+ */
+ public static SqlParseException error(SqlLexerToken token, String msg) {
+ return error0(token, IgniteQueryErrorCode.PARSING, msg);
+ }
+
+ /**
+ * Create parse exception referring to current lexer position.
+ *
+ * @param token Token.
+ * @param code Error code.
+ * @param msg Message.
+ * @return Exception.
+ */
+ private static SqlParseException error0(SqlLexerToken token, int code, String msg) {
+ return new SqlParseException(token.sql(), token.tokenPosition(), code, msg);
+ }
+
+ /**
+ * Create generic parse exception due to unexpected token.
+ *
+ * @param token Token.
+ * @return Exception.
+ */
+ public static SqlParseException errorUnexpectedToken(SqlLexerToken token) {
+ return errorUnexpectedToken0(token);
+ }
+
+ /**
+ * Throw unsupported token exception if passed keyword is found.
+ *
+ * @param token Token.
+ * @param keyword Keyword.
+ */
+ public static void errorUnsupportedIfMatchesKeyword(SqlLexerToken token, String keyword) {
+ if (matchesKeyword(token, keyword))
+ throw errorUnsupported(token);
+ }
+
+ /**
+ * Throw unsupported token exception if one of passed keywords is found.
+ *
+ * @param token Token.
+ * @param keywords Keywords.
+ */
+ public static void errorUnsupportedIfMatchesKeyword(SqlLexerToken token, String... keywords) {
+ if (F.isEmpty(keywords))
+ return;
+
+ for (String keyword : keywords)
+ errorUnsupportedIfMatchesKeyword(token, keyword);
+ }
+
+ /**
+ * Error on unsupported keyword.
+ *
+ * @param token Token.
+ * @return Error.
+ */
+ public static SqlParseException errorUnsupported(SqlLexerToken token) {
+ throw error0(token, IgniteQueryErrorCode.UNSUPPORTED_OPERATION,
+ "Unsupported keyword: \"" + token.token() + "\"");
+ }
+
+ /**
+ * Create generic parse exception due to unexpected token.
+ *
+ * @param lex Lexer.
+ * @param expToken Expected token.
+ * @return Exception.
+ */
+ public static SqlParseException errorUnexpectedToken(SqlLexer lex, String expToken) {
+ return errorUnexpectedToken0(lex, expToken);
+ }
+
+ /**
+ * Create generic parse exception due to unexpected token.
+ *
+ * @param token Token.
+ * @param firstExpToken First expected token.
+ * @param expTokens Additional expected tokens (if any).
+ * @return Exception.
+ */
+ public static SqlParseException errorUnexpectedToken(SqlLexerToken token, String firstExpToken,
+ String... expTokens) {
+ if (F.isEmpty(expTokens))
+ return errorUnexpectedToken0(token, firstExpToken);
+ else {
+ String[] expTokens0 = new String[expTokens.length + 1];
+
+ expTokens0[0] = firstExpToken;
+
+ System.arraycopy(expTokens, 0, expTokens0, 1, expTokens.length);
+
+ throw errorUnexpectedToken0(token, expTokens0);
+ }
+ }
+
+ /**
+ * Create generic parse exception due to unexpected token.
+ *
+ * @param token Token.
+ * @param expTokens Expected tokens (if any).
+ * @return Exception.
+ */
+ @SuppressWarnings("StringConcatenationInsideStringBufferAppend")
+ private static SqlParseException errorUnexpectedToken0(SqlLexerToken token, String... expTokens) {
+ String token0 = token.token();
+
+ StringBuilder msg = new StringBuilder(
+ token0 == null ? "Unexpected end of command" : "Unexpected token: \"" + token0 + "\"");
+
+ if (!F.isEmpty(expTokens)) {
+ msg.append(" (expected: ");
+
+ boolean first = true;
+
+ for (String expToken : expTokens) {
+ if (first)
+ first = false;
+ else
+ msg.append(", ");
+
+ msg.append("\"" + expToken + "\"");
+ }
+
+ msg.append(")");
+ }
+
+ throw error(token, msg.toString());
+ }
+
+ /**
+ * Private constructor.
+ */
+ private SqlParserUtils() {
+ // No-op.
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCommand.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCommand.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCommand.java
new file mode 100644
index 0000000..61ff31f
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCommand.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql.command;
+
+import org.apache.ignite.internal.sql.SqlLexer;
+
+/**
+ * Generic SQL command.
+ */
+public interface SqlCommand {
+ /**
+ * Parse command.
+ *
+ * @param lex Lexer.
+ * @return This instance.
+ */
+ public SqlCommand parse(SqlLexer lex);
+
+ /**
+ * @return Schema name.
+ */
+ public String schemaName();
+
+ /**
+ * @param schemaName Schema name.
+ */
+ public void schemaName(String schemaName);
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
new file mode 100644
index 0000000..897aea5
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql.command;
+
+import org.apache.ignite.internal.sql.SqlLexer;
+import org.apache.ignite.internal.sql.SqlLexerTokenType;
+import org.apache.ignite.internal.sql.SqlLexerToken;
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.tostring.GridToStringInclude;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.Set;
+
+import static org.apache.ignite.internal.sql.SqlKeyword.ASC;
+import static org.apache.ignite.internal.sql.SqlKeyword.DESC;
+import static org.apache.ignite.internal.sql.SqlKeyword.IF;
+import static org.apache.ignite.internal.sql.SqlKeyword.ON;
+import static org.apache.ignite.internal.sql.SqlParserUtils.error;
+import static org.apache.ignite.internal.sql.SqlParserUtils.errorUnexpectedToken;
+import static org.apache.ignite.internal.sql.SqlParserUtils.matchesKeyword;
+import static org.apache.ignite.internal.sql.SqlParserUtils.parseIdentifier;
+import static org.apache.ignite.internal.sql.SqlParserUtils.parseIfNotExists;
+import static org.apache.ignite.internal.sql.SqlParserUtils.parseQualifiedIdentifier;
+import static org.apache.ignite.internal.sql.SqlParserUtils.skipCommaOrRightParenthesis;
+import static org.apache.ignite.internal.sql.SqlParserUtils.skipIfMatchesKeyword;
+
+/**
+ * CREATE INDEX command.
+ */
+public class SqlCreateIndexCommand implements SqlCommand {
+ /** Schema name. */
+ private String schemaName;
+
+ /** Table name. */
+ private String tblName;
+
+ /** Index name. */
+ private String idxName;
+
+ /** IF NOT EXISTS flag. */
+ private boolean ifNotExists;
+
+ /** Spatial index flag. */
+ private boolean spatial;
+
+ /** Columns. */
+ @GridToStringInclude
+ private Collection<SqlIndexColumn> cols;
+
+ /** Column names. */
+ @GridToStringExclude
+ private Set<String> colNames;
+
+ /** {@inheritDoc} */
+ @Override public String schemaName() {
+ return schemaName;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void schemaName(String schemaName) {
+ this.schemaName = schemaName;
+ }
+
+ /**
+ * @return Table name.
+ */
+ public String tableName() {
+ return tblName;
+ }
+
+ /**
+ * @return Index name.
+ */
+ public String indexName() {
+ return idxName;
+ }
+
+ /**
+ * @return IF NOT EXISTS flag.
+ */
+ public boolean ifNotExists() {
+ return ifNotExists;
+ }
+
+ /**
+ * @return Spatial index flag.
+ */
+ public boolean spatial() {
+ return spatial;
+ }
+
+ /**
+ * @param spatial Spatial index flag.
+ * @return This instance.
+ */
+ public SqlCreateIndexCommand spatial(boolean spatial) {
+ this.spatial = spatial;
+
+ return this;
+ }
+
+ /**
+ * @return Columns.
+ */
+ public Collection<SqlIndexColumn> columns() {
+ return cols != null ? cols : Collections.<SqlIndexColumn>emptySet();
+ }
+
+ /** {@inheritDoc} */
+ @Override public SqlCommand parse(SqlLexer lex) {
+ ifNotExists = parseIfNotExists(lex);
+
+ idxName = parseIdentifier(lex, IF);
+
+ skipIfMatchesKeyword(lex, ON);
+
+ SqlQualifiedName tblQName = parseQualifiedIdentifier(lex);
+
+ schemaName = tblQName.schemaName();
+ tblName = tblQName.name();
+
+ parseColumnList(lex);
+
+ return this;
+ }
+
+    /**
+     * Parse the parenthesized index column list.
+     *
+     * @param lex Lexer.
+     */
+ private void parseColumnList(SqlLexer lex) {
+ if (!lex.shift() || lex.tokenType() != SqlLexerTokenType.PARENTHESIS_LEFT)
+ throw errorUnexpectedToken(lex, "(");
+
+ while (true) {
+ perseIndexColumn(lex);
+
+ if (skipCommaOrRightParenthesis(lex))
+ break;
+ }
+ }
+
+ /**
+ * @param lex Lexer.
+ */
+ private void perseIndexColumn(SqlLexer lex) {
+ String name = parseIdentifier(lex);
+ boolean desc = false;
+
+ SqlLexerToken nextToken = lex.lookAhead();
+
+ if (matchesKeyword(nextToken, ASC) || matchesKeyword(nextToken, DESC)) {
+ lex.shift();
+
+ if (matchesKeyword(lex, DESC))
+ desc = true;
+ }
+
+ addColumn(lex, new SqlIndexColumn(name, desc));
+ }
+
+ /**
+ * @param lex Lexer.
+ * @param col Column.
+ */
+ private void addColumn(SqlLexer lex, SqlIndexColumn col) {
+ if (cols == null) {
+ cols = new LinkedList<>();
+ colNames = new HashSet<>();
+ }
+
+ if (!colNames.add(col.name()))
+ throw error(lex, "Column already defined: " + col.name());
+
+ cols.add(col);
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SqlCreateIndexCommand.class, this);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlIndexColumn.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlIndexColumn.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlIndexColumn.java
new file mode 100644
index 0000000..227c02a
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlIndexColumn.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql.command;
+
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Index column definition.
+ */
+public class SqlIndexColumn {
+ /** Column name. */
+ private final String name;
+
+ /** Descending flag. */
+ private final boolean desc;
+
+ /**
+ * Constructor.
+ *
+ * @param name Column name.
+ * @param desc Descending flag.
+ */
+ public SqlIndexColumn(String name, boolean desc) {
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * @return Column name.
+ */
+ public String name() {
+ return name;
+ }
+
+ /**
+ * @return Descending flag.
+ */
+ public boolean descending() {
+ return desc;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SqlIndexColumn.class, this);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlQualifiedName.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlQualifiedName.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlQualifiedName.java
new file mode 100644
index 0000000..965e0ef
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlQualifiedName.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql.command;
+
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * SQL qualified name.
+ */
+public class SqlQualifiedName {
+ /** Schema name. */
+ private String schemaName;
+
+ /** Object name. */
+ private String name;
+
+ /**
+ * @return Schema name.
+ */
+ public String schemaName() {
+ return schemaName;
+ }
+
+ /**
+ * @param schemaName Schema name.
+ * @return This instance.
+ */
+ public SqlQualifiedName schemaName(String schemaName) {
+ this.schemaName = schemaName;
+
+ return this;
+ }
+
+ /**
+ * @return Object name.
+ */
+ public String name() {
+ return name;
+ }
+
+ /**
+ * @param name Object name.
+ * @return This instance.
+ */
+ public SqlQualifiedName name(String name) {
+ this.name = name;
+
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(SqlQualifiedName.class, this);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
new file mode 100644
index 0000000..98a6aae
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserSelfTest.java
@@ -0,0 +1,198 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.sql;
+
+import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlIndexColumn;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.concurrent.Callable;
+
+/**
+ * Test for parser.
+ */
+@SuppressWarnings({"UnusedReturnValue", "ThrowableNotThrown"})
+public class SqlParserSelfTest extends GridCommonAbstractTest {
+ /**
+ * Tests for CREATE INDEX command.
+ *
+ * @throws Exception If failed.
+ */
+ public void testCreateIndex() throws Exception {
+ // Base.
+ parseValidate(null, "CREATE INDEX idx ON tbl(a)", null, "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC)", null, "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC)", null, "TBL", "IDX", "A", true);
+
+ // Case (in)sensitivity.
+ parseValidate(null, "CREATE INDEX IDX ON TBL(COL)", null, "TBL", "IDX", "COL", false);
+ parseValidate(null, "CREATE INDEX iDx ON tBl(cOl)", null, "TBL", "IDX", "COL", false);
+
+ parseValidate(null, "CREATE INDEX \"idx\" ON tbl(col)", null, "TBL", "idx", "COL", false);
+ parseValidate(null, "CREATE INDEX \"iDx\" ON tbl(col)", null, "TBL", "iDx", "COL", false);
+
+ parseValidate(null, "CREATE INDEX idx ON \"tbl\"(col)", null, "tbl", "IDX", "COL", false);
+ parseValidate(null, "CREATE INDEX idx ON \"tBl\"(col)", null, "tBl", "IDX", "COL", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"col\")", null, "TBL", "IDX", "col", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\")", null, "TBL", "IDX", "cOl", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" ASC)", null, "TBL", "IDX", "cOl", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(\"cOl\" DESC)", null, "TBL", "IDX", "cOl", true);
+
+ // Columns.
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b)", null, "TBL", "IDX", "A", false, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b)", null, "TBL", "IDX", "A", false, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b ASC)", null, "TBL", "IDX", "A", false, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b)", null, "TBL", "IDX", "A", true, "B", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b DESC)", null, "TBL", "IDX", "A", true, "B", true);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a ASC, b DESC)", null, "TBL", "IDX", "A", false, "B", true);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b ASC)", null, "TBL", "IDX", "A", true, "B", false);
+
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c)", null, "TBL", "IDX", "A", false, "B", false, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a DESC, b, c)", null, "TBL", "IDX", "A", true, "B", false, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b DESC, c)", null, "TBL", "IDX", "A", false, "B", true, "C", false);
+ parseValidate(null, "CREATE INDEX idx ON tbl(a, b, c DESC)", null, "TBL", "IDX", "A", false, "B", false, "C", true);
+
+ // Negative cases.
+ parseError(null, "CREATE INDEX idx ON tbl()", "Unexpected token");
+ parseError(null, "CREATE INDEX idx ON tbl(a, a)", "Column already defined: A");
+ parseError(null, "CREATE INDEX idx ON tbl(a, b, a)", "Column already defined: A");
+ parseError(null, "CREATE INDEX idx ON tbl(b, a, a)", "Column already defined: A");
+
+ // Tests with schema.
+ parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON \"schema\".tbl(a)", "schema", "TBL", "IDX", "A", false);
+ parseValidate(null, "CREATE INDEX idx ON \"sChema\".tbl(a)", "sChema", "TBL", "IDX", "A", false);
+
+ parseValidate("SCHEMA", "CREATE INDEX idx ON tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ parseValidate("schema", "CREATE INDEX idx ON tbl(a)", "schema", "TBL", "IDX", "A", false);
+ parseValidate("sChema", "CREATE INDEX idx ON tbl(a)", "sChema", "TBL", "IDX", "A", false);
+
+ // NOT EXISTS
+ SqlCreateIndexCommand cmd;
+
+ cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertFalse(cmd.ifNotExists());
+
+ cmd = parseValidate(null, "CREATE INDEX IF NOT EXISTS idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertTrue(cmd.ifNotExists());
+
+ parseError(null, "CREATE INDEX IF idx ON tbl(a)", "Unexpected token: \"IDX\"");
+ parseError(null, "CREATE INDEX IF NOT idx ON tbl(a)", "Unexpected token: \"IDX\"");
+ parseError(null, "CREATE INDEX IF EXISTS idx ON tbl(a)", "Unexpected token: \"EXISTS\"");
+ parseError(null, "CREATE INDEX NOT EXISTS idx ON tbl(a)", "Unexpected token: \"NOT\"");
+
+ // SPATIAL
+ cmd = parseValidate(null, "CREATE INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertFalse(cmd.spatial());
+
+ cmd = parseValidate(null, "CREATE SPATIAL INDEX idx ON schema.tbl(a)", "SCHEMA", "TBL", "IDX", "A", false);
+ assertTrue(cmd.spatial());
+
+ // UNIQUE
+ parseError(null, "CREATE UNIQUE INDEX idx ON tbl(a)", "Unsupported keyword: \"UNIQUE\"");
+
+ // HASH
+ parseError(null, "CREATE HASH INDEX idx ON tbl(a)", "Unsupported keyword: \"HASH\"");
+
+ // PRIMARY KEY
+ parseError(null, "CREATE PRIMARY KEY INDEX idx ON tbl(a)", "Unsupported keyword: \"PRIMARY\"");
+ }
+
+ /**
+ * Make sure that parse error occurs.
+ *
+ * @param schema Schema.
+ * @param sql SQL.
+ * @param msg Expected error message.
+ */
+ private static void parseError(final String schema, final String sql, String msg) {
+ GridTestUtils.assertThrows(null, new Callable<Void>() {
+ @Override public Void call() throws Exception {
+ new SqlParser(schema, sql).nextCommand();
+
+ return null;
+ }
+ }, SqlParseException.class, msg);
+ }
+
+ /**
+ * Parse and validate SQL script.
+ *
+ * @param schema Schema.
+ * @param sql SQL.
+ * @param expSchemaName Expected schema name.
+ * @param expTblName Expected table name.
+ * @param expIdxName Expected index name.
+ * @param expColDefs Expected column definitions.
+ * @return Command.
+ */
+ private static SqlCreateIndexCommand parseValidate(String schema, String sql, String expSchemaName,
+ String expTblName, String expIdxName, Object... expColDefs) {
+ SqlCreateIndexCommand cmd = (SqlCreateIndexCommand)new SqlParser(schema, sql).nextCommand();
+
+ validate(cmd, expSchemaName, expTblName, expIdxName, expColDefs);
+
+ return cmd;
+ }
+
+ /**
+ * Validate create index command.
+ *
+ * @param cmd Command.
+ * @param expSchemaName Expected schema name.
+ * @param expTblName Expected table name.
+ * @param expIdxName Expected index name.
+ * @param expColDefs Expected column definitions.
+ */
+ private static void validate(SqlCreateIndexCommand cmd, String expSchemaName, String expTblName, String expIdxName,
+ Object... expColDefs) {
+ assertEquals(expSchemaName, cmd.schemaName());
+ assertEquals(expTblName, cmd.tableName());
+ assertEquals(expIdxName, cmd.indexName());
+
+ if (F.isEmpty(expColDefs) || expColDefs.length % 2 == 1)
+ throw new IllegalArgumentException("Column definitions must be even.");
+
+ Collection<SqlIndexColumn> cols = cmd.columns();
+
+ assertEquals(expColDefs.length / 2, cols.size());
+
+ Iterator<SqlIndexColumn> colIter = cols.iterator();
+
+ for (int i = 0; i < expColDefs.length;) {
+ SqlIndexColumn col = colIter.next();
+
+ String expColName = (String)expColDefs[i++];
+ Boolean expDesc = (Boolean) expColDefs[i++];
+
+ assertEquals(expColName, col.name());
+ assertEquals(expDesc, (Boolean)col.descending());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
index 31902ac..884752d 100644
--- a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
+++ b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java
@@ -115,6 +115,9 @@ import org.apache.ignite.internal.processors.query.h2.twostep.MapQueryLazyWorker
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitor;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitorClosure;
import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor;
+import org.apache.ignite.internal.sql.SqlParser;
+import org.apache.ignite.internal.sql.command.SqlCommand;
+import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
import org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashMap;
import org.apache.ignite.internal.util.GridEmptyCloseableIterator;
import org.apache.ignite.internal.util.GridSpinBusyLock;
@@ -1321,9 +1324,65 @@ public class IgniteH2Indexing implements GridQueryIndexing {
};
}
+ /**
+ * Try executing query using native facilities.
+ *
+ * @param schemaName Schema name.
+ * @param qry Query.
+ * @return Result or {@code null} if cannot parse/process this query.
+ */
+ private List<FieldsQueryCursor<List<?>>> tryQueryDistributedSqlFieldsNative(String schemaName, SqlFieldsQuery qry) {
+ // Heuristic check for fast return.
+ if (!qry.getSql().toUpperCase().contains("INDEX"))
+ return null;
+
+ // Parse.
+ SqlCommand cmd;
+
+ try {
+ SqlParser parser = new SqlParser(schemaName, qry.getSql());
+
+ cmd = parser.nextCommand();
+
+ // No support for multiple commands for now.
+ if (parser.nextCommand() != null)
+ return null;
+
+ // Only CREATE INDEX is supported for now.
+ if (!(cmd instanceof SqlCreateIndexCommand))
+ return null;
+ }
+ catch (Exception e) {
+ // Cannot parse, return.
+ if (log.isDebugEnabled())
+ log.debug("Failed to parse SQL with native parser [qry=" + qry.getSql() + ", err=" + e + ']');
+
+ return null;
+ }
+
+ // Execute.
+ try {
+ List<FieldsQueryCursor<List<?>>> ress = new ArrayList<>(1);
+
+ FieldsQueryCursor<List<?>> res = ddlProc.runDdlStatement(qry.getSql(), cmd);
+
+ ress.add(res);
+
+ return ress;
+ }
+ catch (IgniteCheckedException e) {
+ throw new IgniteSQLException("Failed to execute DDL statement [stmt=" + qry.getSql() + ']', e);
+ }
+ }
+
/** {@inheritDoc} */
@Override public List<FieldsQueryCursor<List<?>>> queryDistributedSqlFields(String schemaName, SqlFieldsQuery qry,
boolean keepBinary, GridQueryCancel cancel, @Nullable Integer mainCacheId, boolean failOnMultipleStmts) {
+ List<FieldsQueryCursor<List<?>>> res = tryQueryDistributedSqlFieldsNative(schemaName, qry);
+
+ if (res != null)
+ return res;
+
Connection c = connectionForSchema(schemaName);
final boolean enforceJoinOrder = qry.isEnforceJoinOrder();
@@ -1336,6 +1395,7 @@ public class IgniteH2Indexing implements GridQueryIndexing {
H2TwoStepCachedQueryKey cachedQryKey = new H2TwoStepCachedQueryKey(schemaName, sqlQry, grpByCollocated,
distributedJoins, enforceJoinOrder, qry.isLocal());
+
H2TwoStepCachedQuery cachedQry = twoStepCache.get(cachedQryKey);
if (cachedQry != null) {
@@ -1345,14 +1405,12 @@ public class IgniteH2Indexing implements GridQueryIndexing {
List<GridQueryFieldMetadata> meta = cachedQry.meta();
- List<FieldsQueryCursor<List<?>>> res = Collections.singletonList(executeTwoStepsQuery(schemaName, qry.getPageSize(), qry.getPartitions(),
+ return Collections.singletonList(executeTwoStepsQuery(schemaName, qry.getPageSize(), qry.getPartitions(),
qry.getArgs(), keepBinary, qry.isLazy(), qry.getTimeout(), cancel, sqlQry, enforceJoinOrder,
twoStepQry, meta));
-
- return res;
}
- List<FieldsQueryCursor<List<?>>> res = new ArrayList<>(1);
+ res = new ArrayList<>(1);
Object[] argsOrig = qry.getArgs();
int firstArg = 0;
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
index d29a063..fd425c2 100644
--- a/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
+++ b/modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/ddl/DdlStatementsProcessor.java
@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.cache.QueryIndexType;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.GridKernalContext;
@@ -50,6 +51,9 @@ import org.apache.ignite.internal.processors.query.h2.sql.GridSqlDropTable;
import org.apache.ignite.internal.processors.query.h2.sql.GridSqlQueryParser;
import org.apache.ignite.internal.processors.query.h2.sql.GridSqlStatement;
import org.apache.ignite.internal.processors.query.schema.SchemaOperationException;
+import org.apache.ignite.internal.sql.command.SqlCommand;
+import org.apache.ignite.internal.sql.command.SqlCreateIndexCommand;
+import org.apache.ignite.internal.sql.command.SqlIndexColumn;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.typedef.F;
import org.h2.command.Prepared;
@@ -87,6 +91,79 @@ public class DdlStatementsProcessor {
}
/**
+ * Run DDL statement.
+ *
+ * @param sql Original SQL.
+ * @param cmd Command.
+ * @return Result.
+ * @throws IgniteCheckedException On error.
+ */
+ @SuppressWarnings("unchecked")
+ public FieldsQueryCursor<List<?>> runDdlStatement(String sql, SqlCommand cmd) throws IgniteCheckedException{
+ IgniteInternalFuture fut;
+
+ try {
+ if (cmd instanceof SqlCreateIndexCommand) {
+ SqlCreateIndexCommand cmd0 = (SqlCreateIndexCommand)cmd;
+
+ GridH2Table tbl = idx.dataTable(cmd0.schemaName(), cmd0.tableName());
+
+ if (tbl == null)
+ throw new SchemaOperationException(SchemaOperationException.CODE_TABLE_NOT_FOUND, cmd0.tableName());
+
+ assert tbl.rowDescriptor() != null;
+
+ QueryIndex newIdx = new QueryIndex();
+
+ newIdx.setName(cmd0.indexName());
+
+ newIdx.setIndexType(cmd0.spatial() ? QueryIndexType.GEOSPATIAL : QueryIndexType.SORTED);
+
+ LinkedHashMap<String, Boolean> flds = new LinkedHashMap<>();
+
+ // Let's replace H2's table and property names by those operated by GridQueryProcessor.
+ GridQueryTypeDescriptor typeDesc = tbl.rowDescriptor().type();
+
+ for (SqlIndexColumn col : cmd0.columns()) {
+ GridQueryProperty prop = typeDesc.property(col.name());
+
+ if (prop == null)
+ throw new SchemaOperationException(SchemaOperationException.CODE_COLUMN_NOT_FOUND, col.name());
+
+ flds.put(prop.name(), !col.descending());
+ }
+
+ newIdx.setFields(flds);
+
+ fut = ctx.query().dynamicIndexCreate(tbl.cacheName(), cmd.schemaName(), typeDesc.tableName(),
+ newIdx, cmd0.ifNotExists());
+ }
+ else
+ throw new IgniteSQLException("Unsupported DDL operation: " + sql,
+ IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
+
+ if (fut != null)
+ fut.get();
+
+ QueryCursorImpl<List<?>> resCur = (QueryCursorImpl<List<?>>)new QueryCursorImpl(Collections.singletonList
+ (Collections.singletonList(0L)), null, false);
+
+ resCur.fieldsMeta(UPDATE_RESULT_META);
+
+ return resCur;
+ }
+ catch (SchemaOperationException e) {
+ throw convert(e);
+ }
+ catch (IgniteSQLException e) {
+ throw e;
+ }
+ catch (Exception e) {
+ throw new IgniteSQLException("Unexpected DDL operation failure: " + e.getMessage(), e);
+ }
+ }
+
+ /**
* Execute DDL statement.
*
* @param sql SQL.
@@ -97,7 +174,6 @@ public class DdlStatementsProcessor {
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored"})
public FieldsQueryCursor<List<?>> runDdlStatement(String sql, Prepared prepared)
throws IgniteCheckedException {
-
IgniteInternalFuture fut = null;
try {
@@ -402,6 +478,8 @@ public class DdlStatementsProcessor {
}
}
+ assert valCol != null;
+
valTypeName = DataType.getTypeClassName(valCol.column().getType());
res.setValueFieldName(valCol.columnName());
http://git-wip-us.apache.org/repos/asf/ignite/blob/145c59dd/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
index 0b1a753..5339865 100644
--- a/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
+++ b/modules/indexing/src/test/java/org/apache/ignite/testsuites/IgniteCacheQuerySelfTestSuite.java
@@ -154,6 +154,7 @@ import org.apache.ignite.internal.processors.query.h2.sql.GridQueryParsingTest;
import org.apache.ignite.internal.processors.query.h2.sql.H2CompareBigQueryDistributedJoinsTest;
import org.apache.ignite.internal.processors.query.h2.sql.H2CompareBigQueryTest;
import org.apache.ignite.internal.processors.sql.SqlConnectorConfigurationValidationSelfTest;
+import org.apache.ignite.internal.sql.SqlParserSelfTest;
import org.apache.ignite.spi.communication.tcp.GridOrderedMessageCancelSelfTest;
import org.apache.ignite.testframework.IgniteTestSuite;
@@ -168,6 +169,8 @@ public class IgniteCacheQuerySelfTestSuite extends TestSuite {
public static TestSuite suite() throws Exception {
IgniteTestSuite suite = new IgniteTestSuite("Ignite Cache Queries Test Suite");
+ suite.addTestSuite(SqlParserSelfTest.class);
+
suite.addTestSuite(SqlConnectorConfigurationValidationSelfTest.class);
suite.addTestSuite(ClientConnectorConfigurationValidationSelfTest.class);
[08/28] ignite git commit: IGNITE-6833 Web Console: Fixed incremental
SASS compilation in dev-server mode.
Posted by sb...@apache.org.
IGNITE-6833 Web Console: Fixed incremental SASS compilation in dev-server mode.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/8c343a19
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/8c343a19
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/8c343a19
Branch: refs/heads/ignite-zk
Commit: 8c343a19b07c8a17350ff1ef6edc5b2b8044d9a0
Parents: b8672d7
Author: alexdel <ve...@yandex.ru>
Authored: Fri Nov 10 09:26:42 2017 +0700
Committer: Alexey Kuznetsov <ak...@apache.org>
Committed: Fri Nov 10 09:26:42 2017 +0700
----------------------------------------------------------------------
modules/web-console/frontend/package.json | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/8c343a19/modules/web-console/frontend/package.json
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/package.json b/modules/web-console/frontend/package.json
index 95b4a2b..38208df 100644
--- a/modules/web-console/frontend/package.json
+++ b/modules/web-console/frontend/package.json
@@ -63,13 +63,13 @@
"bootstrap-sass": "3.3.7",
"brace": "0.10.0",
"copy-webpack-plugin": "4.0.1",
- "css-loader": "0.28.4",
+ "css-loader": "0.28.7",
"eslint": "4.3.0",
"eslint-friendly-formatter": "3.0.0",
"eslint-loader": "1.9.0",
"eslint-plugin-babel": "4.1.1",
"expose-loader": "0.7.3",
- "extract-text-webpack-plugin": "3.0.0",
+ "extract-text-webpack-plugin": "3.0.2",
"file-loader": "0.11.2",
"file-saver": "1.3.3",
"font-awesome": "4.7.0",
@@ -81,7 +81,7 @@
"json-bigint": "0.2.3",
"jszip": "3.1.4",
"lodash": "4.17.4",
- "node-sass": "4.5.3",
+ "node-sass": "4.6.0",
"nvd3": "1.8.4",
"pako": "1.0.6",
"progress-bar-webpack-plugin": "1.10.0",
@@ -93,12 +93,12 @@
"rxjs": "5.4.2",
"sass-loader": "6.0.6",
"socket.io-client": "1.7.3",
- "style-loader": "0.18.2",
+ "style-loader": "0.19.0",
"svg-sprite-loader": "3.0.7",
"tf-metatags": "2.0.0",
- "webpack": "3.3.0",
- "webpack-dev-server": "2.6.1",
- "webpack-merge": "4.1.0",
+ "webpack": "3.8.1",
+ "webpack-dev-server": "2.9.4",
+ "webpack-merge": "4.1.1",
"worker-loader": "0.8.1"
},
"devDependencies": {
[28/28] ignite git commit: zk
Posted by sb...@apache.org.
zk
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/740c3b24
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/740c3b24
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/740c3b24
Branch: refs/heads/ignite-zk
Commit: 740c3b24f5d5d9fec166f9258d7bb0e31b1117fd
Parents: 2b75ecf
Author: sboikov <sb...@gridgain.com>
Authored: Mon Nov 13 12:41:35 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Mon Nov 13 12:51:45 2017 +0300
----------------------------------------------------------------------
.../apache/ignite/internal/IgniteKernal.java | 2 +-
.../discovery/GridDiscoveryManager.java | 5 ++-
.../spi/discovery/zk/ZookeeperDiscoverySpi.java | 47 +++++++++++++++-----
.../java/org/apache/ZookeeperNodeStart.java | 46 +++++++++++++++++++
.../zk/ZookeeperDiscoverySpiBasicTest.java | 21 +++++++++
5 files changed, 107 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/740c3b24/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
index bde7be2..3e25b50 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
@@ -1315,7 +1315,7 @@ public class IgniteKernal implements IgniteEx, IgniteMXBean, Externalizable {
ackStart(rtBean);
if (!isDaemon())
- ctx.discovery().ackTopology(localNode().order());
+ ctx.discovery().ackTopology(ctx.discovery().localJoin().joinTopologyVersion().topologyVersion());
}
/**
http://git-wip-us.apache.org/repos/asf/ignite/blob/740c3b24/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
index a6737dc..022dc97 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
@@ -2522,8 +2522,9 @@ public class GridDiscoveryManager extends GridManagerAdapter<DiscoverySpi> {
switch (type) {
case EVT_NODE_JOINED: {
- assert !discoOrdered || topVer.topologyVersion() == node.order() : "Invalid topology version [topVer=" + topVer +
- ", node=" + node + ']';
+// TODO ZK
+// assert !discoOrdered || topVer.topologyVersion() == node.order() : "Invalid topology version [topVer=" + topVer +
+// ", node=" + node + ']';
try {
checkAttributes(F.asList(node));
http://git-wip-us.apache.org/repos/asf/ignite/blob/740c3b24/modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpi.java
----------------------------------------------------------------------
diff --git a/modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpi.java b/modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpi.java
index 41debd7..04dc947 100644
--- a/modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpi.java
+++ b/modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpi.java
@@ -292,10 +292,18 @@ public class ZookeeperDiscoverySpi extends IgniteSpiAdapter implements Discovery
private void initLocalNode() {
assert ignite != null;
- locNode = new ZookeeperClusterNode(ignite.configuration().getNodeId(),
+ Serializable consistentId = consistentId();
+
+ UUID nodeId = ignite.configuration().getNodeId();
+
+ // TODO ZK
+ if (consistentId == null)
+ consistentId = nodeId;
+
+ locNode = new ZookeeperClusterNode(nodeId,
locNodeVer,
locNodeAttrs,
- consistentId(),
+ consistentId,
ignite.configuration().isClientMode());
locNode.local(true);
@@ -652,19 +660,35 @@ public class ZookeeperDiscoverySpi extends IgniteSpiAdapter implements Discovery
if (oldNodes.ver == newNodes.ver)
return;
- long nextJoinOrder = oldNodes.nodesByOrder.isEmpty() ? 1 : oldNodes.nodesByOrder.lastKey() + 1;
-
TreeMap<Integer, ZKDiscoveryEvent> evts = new TreeMap<>();
- Set<Long> failed = new HashSet<>();
+ Set<Long> failedNodes = new HashSet<>();
+ Set<Long> joinedNodes = new HashSet<>();
synchronized (curTop) {
for (int v = oldNodes.ver + 1; v <= newNodes.ver; v++) {
- ZKNodeData data = joinHist.get(nextJoinOrder);
+ ZKNodeData joined = null;
+
+ for (ZKNodeData newData : newNodes.nodesByOrder.values()) {
+ if (!curTop.containsKey(newData.order) && !joinedNodes.contains(newData.order)) {
+ joined = newData;
+
+ break;
+ }
+ }
+
+ // TODO ZK: process joinHist
+
+ if (joined != null) {
+ joinedNodes.add(joined.order);
+
+ ZKNodeData data = joinHist.get(joined.order);
- if (data != null) {
ZKJoiningNodeData joinData = data.joinData;
+ if (joinData == null)
+ System.out.println();
+
assert joinData != null : data;
curTop.put(joinData.node.order(), joinData.node);
@@ -702,13 +726,11 @@ public class ZookeeperDiscoverySpi extends IgniteSpiAdapter implements Discovery
failedNode,
new ArrayList<>(curTop.values())));
}
-
- nextJoinOrder++;
}
else {
for (ZKNodeData oldData : oldNodes.nodesByOrder.values()) {
- if (!failed.contains(oldData.order) && !newNodes.nodesByOrder.containsKey(oldData.order)) {
- failed.add(oldData.order);
+ if (!failedNodes.contains(oldData.order) && !newNodes.nodesByOrder.containsKey(oldData.order)) {
+ failedNodes.add(oldData.order);
ZookeeperClusterNode failedNode = curTop.remove(oldData.order);
@@ -1026,6 +1048,9 @@ public class ZookeeperDiscoverySpi extends IgniteSpiAdapter implements Discovery
* @param joiningNodeData Discovery data.
*/
ZKJoiningNodeData(ZookeeperClusterNode node, Map<Integer, Serializable> joiningNodeData) {
+ assert node != null && node.id() != null : node;
+ assert joiningNodeData != null;
+
this.node = node;
this.joiningNodeData = joiningNodeData;
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/740c3b24/modules/zookeeper/src/test/java/org/apache/ZookeeperNodeStart.java
----------------------------------------------------------------------
diff --git a/modules/zookeeper/src/test/java/org/apache/ZookeeperNodeStart.java b/modules/zookeeper/src/test/java/org/apache/ZookeeperNodeStart.java
new file mode 100644
index 0000000..89d6604
--- /dev/null
+++ b/modules/zookeeper/src/test/java/org/apache/ZookeeperNodeStart.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache;
+
+import org.apache.ignite.Ignition;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.spi.discovery.zk.ZookeeperDiscoverySpi;
+
+/**
+ *
+ */
+public class ZookeeperNodeStart {
+ public static void main(String[] args) throws Exception {
+ try {
+ IgniteConfiguration cfg = new IgniteConfiguration();
+
+ ZookeeperDiscoverySpi spi = new ZookeeperDiscoverySpi();
+
+ spi.setConnectString("localhost:2181");
+
+ cfg.setDiscoverySpi(spi);
+
+ Ignition.start(cfg);
+ }
+ catch (Throwable e) {
+ e.printStackTrace(System.out);
+
+ System.exit(1);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/740c3b24/modules/zookeeper/src/test/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpiBasicTest.java
----------------------------------------------------------------------
diff --git a/modules/zookeeper/src/test/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpiBasicTest.java b/modules/zookeeper/src/test/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpiBasicTest.java
index e8d13a1..6e6c528 100644
--- a/modules/zookeeper/src/test/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpiBasicTest.java
+++ b/modules/zookeeper/src/test/java/org/apache/ignite/spi/discovery/zk/ZookeeperDiscoverySpiBasicTest.java
@@ -107,6 +107,23 @@ public class ZookeeperDiscoverySpiBasicTest extends GridCommonAbstractTest {
/**
* @throws Exception If failed.
*/
+ public void testRestarts_2_Nodes() throws Exception {
+ startGrid(0);
+
+ for (int i = 0; i < 10; i++) {
+ info("Iteration: " + i);
+
+ startGrid(1);
+
+ waitForTopology(2);
+
+ stopGrid(1);
+ }
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
public void testStartStop_2_Nodes_WithCache() throws Exception {
startGrids(2);
@@ -170,6 +187,10 @@ public class ZookeeperDiscoverySpiBasicTest extends GridCommonAbstractTest {
}, 3, "stop-node-thread");
waitForTopology(7);
+
+ startGridsMultiThreaded(0, 3);
+
+ waitForTopology(10);
}
/**
[17/28] ignite git commit: IGNITE-5218: First version of decision
trees. This closes #2936
Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/util/MnistUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/util/MnistUtils.java b/modules/ml/src/main/java/org/apache/ignite/ml/util/MnistUtils.java
new file mode 100644
index 0000000..03e3198
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/util/MnistUtils.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.util;
+
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+import java.util.stream.Stream;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+
+/**
+ * Utility class for reading MNIST dataset.
+ */
+public class MnistUtils {
+ /**
+ * Read random {@code count} samples from MNIST dataset from two files (images and labels) into a stream of labeled vectors.
+ * @param imagesPath Path to the file with images.
+ * @param labelsPath Path to the file with labels.
+ * @param rnd Random numbers generator.
+ * @param count Count of samples to read.
+ * @return Stream of MNIST samples.
+ * @throws IOException
+ */
+ public static Stream<DenseLocalOnHeapVector> mnist(String imagesPath, String labelsPath, Random rnd, int count) throws IOException {
+ FileInputStream isImages = new FileInputStream(imagesPath);
+ FileInputStream isLabels = new FileInputStream(labelsPath);
+
+ int magic = read4Bytes(isImages); // Skip magic number.
+ int numOfImages = read4Bytes(isImages);
+ int imgHeight = read4Bytes(isImages);
+ int imgWidth = read4Bytes(isImages);
+
+ read4Bytes(isLabels); // Skip magic number.
+ read4Bytes(isLabels); // Skip number of labels.
+
+ int numOfPixels = imgHeight * imgWidth;
+
+ System.out.println("Magic: " + magic);
+ System.out.println("Num of images: " + numOfImages);
+ System.out.println("Num of pixels: " + numOfPixels);
+
+ double[][] vecs = new double[numOfImages][numOfPixels + 1];
+
+ for (int imgNum = 0; imgNum < numOfImages; imgNum++) {
+ vecs[imgNum][numOfPixels] = isLabels.read();
+ for (int p = 0; p < numOfPixels; p++) {
+ int c = 128 - isImages.read();
+ vecs[imgNum][p] = (double)c / 128;
+ }
+ }
+
+ List<double[]> lst = Arrays.asList(vecs);
+ Collections.shuffle(lst, rnd);
+
+ isImages.close();
+ isLabels.close();
+
+ return lst.subList(0, count).stream().map(DenseLocalOnHeapVector::new);
+ }
+
+ /**
+ * Convert random {@code count} samples from MNIST dataset from two files (images and labels) into libsvm format.
+ * @param imagesPath Path to the file with images.
+ * @param labelsPath Path to the file with labels.
+ * @param outPath Path to output path.
+ * @param rnd Random numbers generator.
+ * @param count Count of samples to read.
+ * @throws IOException
+ */
+ public static void asLIBSVM(String imagesPath, String labelsPath, String outPath, Random rnd, int count) throws IOException {
+
+ try (FileWriter fos = new FileWriter(outPath)) {
+ mnist(imagesPath, labelsPath, rnd, count).forEach(vec -> {
+ try {
+ fos.write((int)vec.get(vec.size() - 1) + " ");
+
+ for (int i = 0; i < vec.size() - 1; i++) {
+ double val = vec.get(i);
+
+ if (val != 0)
+ fos.write((i + 1) + ":" + val + " ");
+ }
+
+ fos.write("\n");
+
+ }
+ catch (IOException e) {
+ e.printStackTrace();
+ }
+ });
+ }
+ }
+
+ /**
+ * Utility method for reading 4 bytes from input stream.
+ * @param is Input stream.
+ * @throws IOException
+ */
+ private static int read4Bytes(FileInputStream is) throws IOException {
+ return (is.read() << 24) | (is.read() << 16) | (is.read() << 8) | (is.read());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/util/Utils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/util/Utils.java b/modules/ml/src/main/java/org/apache/ignite/ml/util/Utils.java
new file mode 100644
index 0000000..b7669be
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/util/Utils.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+/**
+ * Class with various utility methods.
+ */
+public class Utils {
+ /**
+ * Perform deep copy of an object.
+ * @param orig Original object.
+ * @param <T> Class of original object;
+ * @return Deep copy of original object.
+ */
+ public static <T> T copy(T orig) {
+ Object obj = null;
+ try {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream out = new ObjectOutputStream(baos);
+ out.writeObject(orig);
+ out.flush();
+ out.close();
+ ObjectInputStream in = new ObjectInputStream(
+ new ByteArrayInputStream(baos.toByteArray()));
+ obj = in.readObject();
+ }
+ catch (IOException | ClassNotFoundException e) {
+ e.printStackTrace();
+ }
+ return (T)obj;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java b/modules/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java
index 5ac7443..47910c8 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/IgniteMLTestSuite.java
@@ -20,6 +20,7 @@ package org.apache.ignite.ml;
import org.apache.ignite.ml.clustering.ClusteringTestSuite;
import org.apache.ignite.ml.math.MathImplMainTestSuite;
import org.apache.ignite.ml.regressions.RegressionsTestSuite;
+import org.apache.ignite.ml.trees.DecisionTreesTestSuite;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
@@ -30,7 +31,8 @@ import org.junit.runners.Suite;
@Suite.SuiteClasses({
MathImplMainTestSuite.class,
RegressionsTestSuite.class,
- ClusteringTestSuite.class
+ ClusteringTestSuite.class,
+ DecisionTreesTestSuite.class
})
public class IgniteMLTestSuite {
// No-op.
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/TestUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/TestUtils.java b/modules/ml/src/test/java/org/apache/ignite/ml/TestUtils.java
index 62fdf2c..d094813 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/TestUtils.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/TestUtils.java
@@ -23,6 +23,8 @@ import org.apache.ignite.ml.math.Precision;
import org.apache.ignite.ml.math.Vector;
import org.junit.Assert;
+import static org.junit.Assert.assertTrue;
+
/** */
public class TestUtils {
/**
@@ -245,4 +247,17 @@ public class TestUtils {
public static double maximumAbsoluteRowSum(Matrix mtx) {
return IntStream.range(0, mtx.rowSize()).mapToObj(mtx::viewRow).map(v -> Math.abs(v.sum())).reduce(Math::max).get();
}
+
+ /** */
+ public static void checkIsInEpsilonNeighbourhood(Vector[] v1s, Vector[] v2s, double epsilon) {
+ for (int i = 0; i < v1s.length; i++) {
+ assertTrue("Not in epsilon neighbourhood (index " + i + ") ",
+ v1s[i].minus(v2s[i]).kNorm(2) < epsilon);
+ }
+ }
+
+ /** */
+ public static void checkIsInEpsilonNeighbourhood(Vector v1, Vector v2, double epsilon) {
+ checkIsInEpsilonNeighbourhood(new Vector[] {v1}, new Vector[] {v2}, epsilon);
+ }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedBlockMatrixTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedBlockMatrixTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedBlockMatrixTest.java
index 2943bc0..fd6ed78 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedBlockMatrixTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedBlockMatrixTest.java
@@ -24,6 +24,7 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Collection;
import java.util.Set;
+import java.util.UUID;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.internal.util.IgniteUtils;
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/BaseDecisionTreeTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/BaseDecisionTreeTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/BaseDecisionTreeTest.java
new file mode 100644
index 0000000..65f0ae4
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/BaseDecisionTreeTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.Arrays;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.ml.structures.LabeledVectorDouble;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+/**
+ * Base class for decision trees test.
+ */
+public class BaseDecisionTreeTest extends GridCommonAbstractTest {
+ /** Count of nodes. */
+ private static final int NODE_COUNT = 4;
+
+ /** Grid instance. */
+ protected Ignite ignite;
+
+ /**
+ * Default constructor.
+ */
+ public BaseDecisionTreeTest() {
+ super(false);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override protected void beforeTest() throws Exception {
+ ignite = grid(NODE_COUNT);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTestsStarted() throws Exception {
+ for (int i = 1; i <= NODE_COUNT; i++)
+ startGrid(i);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void afterTestsStopped() throws Exception {
+ stopAllGrids();
+ }
+
+ /**
+ * Convert double array to {@link LabeledVectorDouble}
+ *
+ * @param arr Array for conversion.
+ * @return LabeledVectorDouble.
+ */
+ protected static LabeledVectorDouble<DenseLocalOnHeapVector> asLabeledVector(double arr[]) {
+ return new LabeledVectorDouble<>(new DenseLocalOnHeapVector(Arrays.copyOf(arr, arr.length - 1)), arr[arr.length - 1]);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/ColumnDecisionTreeTrainerTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/ColumnDecisionTreeTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/ColumnDecisionTreeTrainerTest.java
new file mode 100644
index 0000000..2b03b47
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/ColumnDecisionTreeTrainerTest.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.DoubleStream;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Tracer;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.ml.structures.LabeledVectorDouble;
+import org.apache.ignite.ml.trees.models.DecisionTreeModel;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainerInput;
+import org.apache.ignite.ml.trees.trainers.columnbased.MatrixColumnDecisionTreeTrainerInput;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.ContinuousSplitCalculators;
+import org.apache.ignite.ml.trees.trainers.columnbased.regcalcs.RegionCalculators;
+
+/** Tests behaviour of ColumnDecisionTreeTrainer. */
+public class ColumnDecisionTreeTrainerTest extends BaseDecisionTreeTest {
+ /**
+ * Test {@link ColumnDecisionTreeTrainer} for mixed (continuous and categorical) data with Gini impurity.
+ */
+ public void testCacheMixedGini() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+ int totalPts = 1 << 10;
+ int featCnt = 2;
+
+ HashMap<Integer, Integer> catsInfo = new HashMap<>();
+ catsInfo.put(1, 3);
+
+ Random rnd = new Random(12349L);
+
+ SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+ featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+ split(0, 1, new int[] {0, 2}).
+ split(1, 0, -10.0);
+
+ testByGen(totalPts, catsInfo, gen, ContinuousSplitCalculators.GINI.apply(ignite), RegionCalculators.GINI, RegionCalculators.MEAN, rnd);
+ }
+
+ /**
+ * Test {@link ColumnDecisionTreeTrainer} for mixed (continuous and categorical) data with Variance impurity.
+ */
+ public void testCacheMixed() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+ int totalPts = 1 << 10;
+ int featCnt = 2;
+
+ HashMap<Integer, Integer> catsInfo = new HashMap<>();
+ catsInfo.put(1, 3);
+
+ Random rnd = new Random(12349L);
+
+ SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+ featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+ split(0, 1, new int[] {0, 2}).
+ split(1, 0, -10.0);
+
+ testByGen(totalPts, catsInfo, gen, ContinuousSplitCalculators.VARIANCE, RegionCalculators.VARIANCE, RegionCalculators.MEAN, rnd);
+ }
+
+ /**
+ * Test {@link ColumnDecisionTreeTrainer} for continuous data with Variance impurity.
+ */
+ public void testCacheCont() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+ int totalPts = 1 << 10;
+ int featCnt = 12;
+
+ HashMap<Integer, Integer> catsInfo = new HashMap<>();
+
+ Random rnd = new Random(12349L);
+
+ SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+ featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+ split(0, 0, -10.0).
+ split(1, 0, 0.0).
+ split(1, 1, 2.0).
+ split(3, 7, 50.0);
+
+ testByGen(totalPts, catsInfo, gen, ContinuousSplitCalculators.VARIANCE, RegionCalculators.VARIANCE, RegionCalculators.MEAN, rnd);
+ }
+
+ /**
+ * Test {@link ColumnDecisionTreeTrainer} for continuous data with Gini impurity.
+ */
+ public void testCacheContGini() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+ int totalPts = 1 << 10;
+ int featCnt = 12;
+
+ HashMap<Integer, Integer> catsInfo = new HashMap<>();
+
+ Random rnd = new Random(12349L);
+
+ SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+ featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+ split(0, 0, -10.0).
+ split(1, 0, 0.0).
+ split(1, 1, 2.0).
+ split(3, 7, 50.0);
+
+ testByGen(totalPts, catsInfo, gen, ContinuousSplitCalculators.GINI.apply(ignite), RegionCalculators.GINI, RegionCalculators.MEAN, rnd);
+ }
+
+ /**
+ * Test {@link ColumnDecisionTreeTrainer} for categorical data with Variance impurity.
+ */
+ public void testCacheCat() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+ int totalPts = 1 << 10;
+ int featCnt = 12;
+
+ HashMap<Integer, Integer> catsInfo = new HashMap<>();
+ catsInfo.put(5, 7);
+
+ Random rnd = new Random(12349L);
+
+ SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+ featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+ split(0, 5, new int[] {0, 2, 5});
+
+ testByGen(totalPts, catsInfo, gen, ContinuousSplitCalculators.VARIANCE, RegionCalculators.VARIANCE, RegionCalculators.MEAN, rnd);
+ }
+
+ /** */
+ private <D extends ContinuousRegionInfo> void testByGen(int totalPts, HashMap<Integer, Integer> catsInfo,
+ SplitDataGenerator<DenseLocalOnHeapVector> gen,
+ IgniteFunction<ColumnDecisionTreeTrainerInput, ? extends ContinuousSplitCalculator<D>> calc,
+ IgniteFunction<ColumnDecisionTreeTrainerInput, IgniteFunction<DoubleStream, Double>> catImpCalc,
+ IgniteFunction<DoubleStream, Double> regCalc, Random rnd) {
+
+ List<IgniteBiTuple<Integer, DenseLocalOnHeapVector>> lst = gen.
+ points(totalPts, (i, rn) -> i).
+ collect(Collectors.toList());
+
+ int featCnt = gen.featuresCnt();
+
+ Collections.shuffle(lst, rnd);
+
+ SparseDistributedMatrix m = new SparseDistributedMatrix(totalPts, featCnt + 1, StorageConstants.COLUMN_STORAGE_MODE, StorageConstants.RANDOM_ACCESS_MODE);
+
+ Map<Integer, List<LabeledVectorDouble>> byRegion = new HashMap<>();
+
+ int i = 0;
+ for (IgniteBiTuple<Integer, DenseLocalOnHeapVector> bt : lst) {
+ byRegion.putIfAbsent(bt.get1(), new LinkedList<>());
+ byRegion.get(bt.get1()).add(asLabeledVector(bt.get2().getStorage().data()));
+ m.setRow(i, bt.get2().getStorage().data());
+ i++;
+ }
+
+ ColumnDecisionTreeTrainer<D> trainer =
+ new ColumnDecisionTreeTrainer<>(3, calc, catImpCalc, regCalc, ignite);
+
+ DecisionTreeModel mdl = trainer.train(new MatrixColumnDecisionTreeTrainerInput(m, catsInfo));
+
+ byRegion.keySet().forEach(k -> {
+ LabeledVectorDouble sp = byRegion.get(k).get(0);
+ Tracer.showAscii(sp.vector());
+ System.out.println("Act: " + sp.label() + " " + " pred: " + mdl.predict(sp.vector()));
+ assert mdl.predict(sp.vector()) == sp.doubleLabel();
+ });
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/DecisionTreesTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/DecisionTreesTestSuite.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/DecisionTreesTestSuite.java
new file mode 100644
index 0000000..3343503
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/DecisionTreesTestSuite.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+/**
+ * Test suite for all tests located in org.apache.ignite.ml.trees package
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({
+ ColumnDecisionTreeTrainerTest.class,
+ GiniSplitCalculatorTest.class,
+ VarianceSplitCalculatorTest.class
+})
+public class DecisionTreesTestSuite {
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/GiniSplitCalculatorTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/GiniSplitCalculatorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/GiniSplitCalculatorTest.java
new file mode 100644
index 0000000..c92b4f5
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/GiniSplitCalculatorTest.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.GiniSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+import org.junit.Test;
+
+/**
+ * Test of {@link GiniSplitCalculator}.
+ */
+public class GiniSplitCalculatorTest {
+ /** Test calculation of region info consisting of one point. */
+ @Test
+ public void testCalculateRegionInfoSimple() {
+ double labels[] = new double[] {0.0};
+
+ assert new GiniSplitCalculator(labels).calculateRegionInfo(DoubleStream.of(labels), 0).impurity() == 0.0;
+ }
+
+ /** Test calculation of region info consisting of two distinct classes. */
+ @Test
+ public void testCalculateRegionInfoTwoClasses() {
+ double labels[] = new double[] {0.0, 1.0};
+
+ assert new GiniSplitCalculator(labels).calculateRegionInfo(DoubleStream.of(labels), 0).impurity() == 0.5;
+ }
+
+ /** Test calculation of region info consisting of three distinct classes. */
+ @Test
+ public void testCalculateRegionInfoThreeClasses() {
+ double labels[] = new double[] {0.0, 1.0, 2.0};
+
+ assert Math.abs(new GiniSplitCalculator(labels).calculateRegionInfo(DoubleStream.of(labels), 0).impurity() - 2.0 / 3) < 1E-5;
+ }
+
+ /** Test calculation of split of region consisting of one point. */
+ @Test
+ public void testSplitSimple() {
+ double labels[] = new double[] {0.0};
+ double values[] = new double[] {0.0};
+ Integer[] samples = new Integer[] {0};
+
+ int cnts[] = new int[] {1};
+
+ GiniSplitCalculator.GiniData data = new GiniSplitCalculator.GiniData(0.0, 1, cnts, 1);
+
+ assert new GiniSplitCalculator(labels).splitRegion(samples, values, labels, 0, data) == null;
+ }
+
+ /** Test calculation of split of region consisting of two points. */
+ @Test
+ public void testSplitTwoClassesTwoPoints() {
+ double labels[] = new double[] {0.0, 1.0};
+ double values[] = new double[] {0.0, 1.0};
+ Integer[] samples = new Integer[] {0, 1};
+
+ int cnts[] = new int[] {1, 1};
+
+ GiniSplitCalculator.GiniData data = new GiniSplitCalculator.GiniData(0.5, 2, cnts, 1.0 * 1.0 + 1.0 * 1.0);
+
+ SplitInfo<GiniSplitCalculator.GiniData> split = new GiniSplitCalculator(labels).splitRegion(samples, values, labels, 0, data);
+
+ assert split.leftData().impurity() == 0;
+ assert split.leftData().counts()[0] == 1;
+ assert split.leftData().counts()[1] == 0;
+ assert split.leftData().getSize() == 1;
+
+ assert split.rightData().impurity() == 0;
+ assert split.rightData().counts()[0] == 0;
+ assert split.rightData().counts()[1] == 1;
+ assert split.rightData().getSize() == 1;
+ }
+
+ /** Test calculation of split of region consisting of four distinct values. */
+ @Test
+ public void testSplitTwoClassesFourPoints() {
+ double labels[] = new double[] {0.0, 0.0, 1.0, 1.0};
+ double values[] = new double[] {0.0, 1.0, 2.0, 3.0};
+
+ Integer[] samples = new Integer[] {0, 1, 2, 3};
+
+ int[] cnts = new int[] {2, 2};
+
+ GiniSplitCalculator.GiniData data = new GiniSplitCalculator.GiniData(0.5, 4, cnts, 2.0 * 2.0 + 2.0 * 2.0);
+
+ SplitInfo<GiniSplitCalculator.GiniData> split = new GiniSplitCalculator(labels).splitRegion(samples, values, labels, 0, data);
+
+ assert split.leftData().impurity() == 0;
+ assert split.leftData().counts()[0] == 2;
+ assert split.leftData().counts()[1] == 0;
+ assert split.leftData().getSize() == 2;
+
+ assert split.rightData().impurity() == 0;
+ assert split.rightData().counts()[0] == 0;
+ assert split.rightData().counts()[1] == 2;
+ assert split.rightData().getSize() == 2;
+ }
+
+ /** Test calculation of split of region consisting of three distinct values. */
+ @Test
+ public void testSplitThreePoints() {
+ double labels[] = new double[] {0.0, 1.0, 2.0};
+ double values[] = new double[] {0.0, 1.0, 2.0};
+ Integer[] samples = new Integer[] {0, 1, 2};
+
+ int[] cnts = new int[] {1, 1, 1};
+
+ GiniSplitCalculator.GiniData data = new GiniSplitCalculator.GiniData(2.0 / 3, 3, cnts, 1.0 * 1.0 + 1.0 * 1.0 + 1.0 * 1.0);
+
+ SplitInfo<GiniSplitCalculator.GiniData> split = new GiniSplitCalculator(labels).splitRegion(samples, values, labels, 0, data);
+
+ assert split.leftData().impurity() == 0.0;
+ assert split.leftData().counts()[0] == 1;
+ assert split.leftData().counts()[1] == 0;
+ assert split.leftData().counts()[2] == 0;
+ assert split.leftData().getSize() == 1;
+
+ assert split.rightData().impurity() == 0.5;
+ assert split.rightData().counts()[0] == 0;
+ assert split.rightData().counts()[1] == 1;
+ assert split.rightData().counts()[2] == 1;
+ assert split.rightData().getSize() == 2;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/SplitDataGenerator.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/SplitDataGenerator.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/SplitDataGenerator.java
new file mode 100644
index 0000000..279e685
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/SplitDataGenerator.java
@@ -0,0 +1,390 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ignite.ml.trees;
+
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.exceptions.MathIllegalArgumentException;
+import org.apache.ignite.ml.util.Utils;
+
+/**
+ * Utility class for generating data which has binary tree split structure.
+ *
+ * @param <V> Type of vectors produced by this generator.
+ */
+public class SplitDataGenerator<V extends Vector> {
+ /** */
+ private static final double DELTA = 100.0;
+
+ /** Map of the form of (is categorical -> list of region indexes). */
+ private final Map<Boolean, List<Integer>> di;
+
+ /** List of regions. */
+ private final List<Region> regs;
+
+ /** Data of bounds of regions. */
+ private final Map<Integer, IgniteBiTuple<Double, Double>> boundsData;
+
+ /** Random numbers generator. */
+ private final Random rnd;
+
+ /** Supplier of vectors. */
+ private final Supplier<V> supplier;
+
+ /** Features count. */
+ private final int featCnt;
+
+ /**
+ * Create SplitDataGenerator.
+ *
+ * @param featCnt Features count.
+ * @param catFeaturesInfo Information about categorical features in form of map (feature index -> categories
+ * count).
+ * @param supplier Supplier of vectors.
+ * @param rnd Random numbers generator.
+ */
+ public SplitDataGenerator(int featCnt, Map<Integer, Integer> catFeaturesInfo, Supplier<V> supplier, Random rnd) {
+ regs = new LinkedList<>();
+ boundsData = new HashMap<>();
+ this.rnd = rnd;
+ this.supplier = supplier;
+ this.featCnt = featCnt;
+
+ // Divide indexes into indexes of categorical coordinates and indexes of continuous coordinates.
+ di = IntStream.range(0, featCnt).
+ boxed().
+ collect(Collectors.partitioningBy(catFeaturesInfo::containsKey));
+
+ // Categorical coordinates info.
+ Map<Integer, CatCoordInfo> catCoords = new HashMap<>();
+ di.get(true).forEach(i -> {
+ BitSet bs = new BitSet();
+ bs.set(0, catFeaturesInfo.get(i));
+ catCoords.put(i, new CatCoordInfo(bs));
+ });
+
+ // Continuous coordinates info.
+ Map<Integer, ContCoordInfo> contCoords = new HashMap<>();
+ di.get(false).forEach(i -> {
+ contCoords.put(i, new ContCoordInfo());
+ boundsData.put(i, new IgniteBiTuple<>(-1.0, 1.0));
+ });
+
+ Region firstReg = new Region(catCoords, contCoords, 0);
+ regs.add(firstReg);
+ }
+
+ /**
+ * Categorical coordinate info.
+ */
+ private static class CatCoordInfo implements Serializable {
+ /**
+ * Defines categories which are included in this region
+ */
+ private final BitSet bs;
+
+ /**
+ * Construct CatCoordInfo.
+ *
+ * @param bs Bitset.
+ */
+ CatCoordInfo(BitSet bs) {
+ this.bs = bs;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "CatCoordInfo [" +
+ "bs=" + bs +
+ ']';
+ }
+ }
+
+ /**
+ * Continuous coordinate info.
+ */
+ private static class ContCoordInfo implements Serializable {
+ /**
+ * Left (min) bound of region.
+ */
+ private double left;
+
+ /**
+ * Right (max) bound of region.
+ */
+ private double right;
+
+ /**
+ * Construct ContCoordInfo.
+ */
+ ContCoordInfo() {
+ left = Double.NEGATIVE_INFINITY;
+ right = Double.POSITIVE_INFINITY;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "ContCoordInfo [" +
+ "left=" + left +
+ ", right=" + right +
+ ']';
+ }
+ }
+
+ /**
+ * Class representing information about region.
+ */
+ private static class Region implements Serializable {
+ /**
+ * Information about categorical coordinates restrictions of this region in form of
+ * (coordinate index -> restriction)
+ */
+ private final Map<Integer, CatCoordInfo> catCoords;
+
+ /**
+ * Information about continuous coordinates restrictions of this region in form of
+ * (coordinate index -> restriction)
+ */
+ private final Map<Integer, ContCoordInfo> contCoords;
+
+ /**
+ * Region should contain {@code 1/2^twoPow * totalPoints} points.
+ */
+ private int twoPow;
+
+ /**
+ * Construct region by information about restrictions on coordinates (features) values.
+ *
+ * @param catCoords Restrictions on categorical coordinates.
+ * @param contCoords Restrictions on continuous coordinates
+ * @param twoPow Region should contain {@code 1/2^twoPow * totalPoints} points.
+ */
+ Region(Map<Integer, CatCoordInfo> catCoords, Map<Integer, ContCoordInfo> contCoords, int twoPow) {
+ this.catCoords = catCoords;
+ this.contCoords = contCoords;
+ this.twoPow = twoPow;
+ }
+
+ /** */
+ int divideBy() {
+ return 1 << twoPow;
+ }
+
+ /** */
+ void incTwoPow() {
+ twoPow++;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "Region [" +
+ "catCoords=" + catCoords +
+ ", contCoords=" + contCoords +
+ ", twoPow=" + twoPow +
+ ']';
+ }
+
+ /**
+ * Generate continuous coordinate for this region.
+ *
+ * @param coordIdx Coordinate index.
+ * @param boundsData Data with bounds
+ * @param rnd Random numbers generator.
+ * @return Continuous coordinate value.
+ */
+ double generateContCoord(int coordIdx, Map<Integer, IgniteBiTuple<Double, Double>> boundsData,
+ Random rnd) {
+ ContCoordInfo cci = contCoords.get(coordIdx);
+ double left = cci.left;
+ double right = cci.right;
+
+ if (left == Double.NEGATIVE_INFINITY)
+ left = boundsData.get(coordIdx).get1() - DELTA;
+
+ if (right == Double.POSITIVE_INFINITY)
+ right = boundsData.get(coordIdx).get2() + DELTA;
+
+ double size = right - left;
+
+ return left + rnd.nextDouble() * size;
+ }
+
+ /**
+ * Generate categorical coordinate value for this region.
+ *
+ * @param coordIdx Coordinate index.
+ * @param rnd Random numbers generator.
+ * @return Categorical coordinate value.
+ */
+ double generateCatCoord(int coordIdx, Random rnd) {
+ // Pick random bit.
+ BitSet bs = catCoords.get(coordIdx).bs;
+ int j = rnd.nextInt(bs.length());
+
+ int i = 0;
+ int bn = 0;
+ int bnp = 0;
+
+ while ((bn = bs.nextSetBit(bn)) != -1 && i <= j) {
+ i++;
+ bnp = bn;
+ bn++;
+ }
+
+ return bnp;
+ }
+
+ /**
+ * Generate points for this region.
+ *
+ * @param ptsCnt Count of points to generate.
+ * @param val Label for all points in this region.
+ * @param boundsData Data about bounds of continuous coordinates.
+ * @param catCont Data about which categories can be in this region in the form (coordinate index -> list of
+ * categories indexes).
+ * @param s Vectors supplier.
+ * @param rnd Random numbers generator.
+ * @param <V> Type of vectors.
+ * @return Stream of generated points for this region.
+ */
+ <V extends Vector> Stream<V> generatePoints(int ptsCnt, double val,
+ Map<Integer, IgniteBiTuple<Double, Double>> boundsData, Map<Boolean, List<Integer>> catCont,
+ Supplier<V> s,
+ Random rnd) {
+ return IntStream.range(0, ptsCnt / divideBy()).mapToObj(i -> {
+ V v = s.get();
+ int coordsCnt = v.size();
+ catCont.get(false).forEach(ci -> v.setX(ci, generateContCoord(ci, boundsData, rnd)));
+ catCont.get(true).forEach(ci -> v.setX(ci, generateCatCoord(ci, rnd)));
+
+ v.setX(coordsCnt - 1, val);
+ return v;
+ });
+ }
+ }
+
+ /**
+ * Split region by continuous coordinate using given threshold.
+ *
+ * @param regIdx Region index.
+ * @param coordIdx Coordinate index.
+ * @param threshold Threshold.
+ * @return {@code this}.
+ */
+ public SplitDataGenerator<V> split(int regIdx, int coordIdx, double threshold) {
+ Region regToSplit = regs.get(regIdx);
+ ContCoordInfo cci = regToSplit.contCoords.get(coordIdx);
+
+ double left = cci.left;
+ double right = cci.right;
+
+ if (threshold < left || threshold > right)
+ throw new MathIllegalArgumentException("Threshold is out of region bounds.");
+
+ regToSplit.incTwoPow();
+
+ Region newReg = Utils.copy(regToSplit);
+ newReg.contCoords.get(coordIdx).left = threshold;
+
+ regs.add(regIdx + 1, newReg);
+ cci.right = threshold;
+
+ IgniteBiTuple<Double, Double> bounds = boundsData.get(coordIdx);
+ double min = bounds.get1();
+ double max = bounds.get2();
+ boundsData.put(coordIdx, new IgniteBiTuple<>(Math.min(threshold, min), Math.max(max, threshold)));
+
+ return this;
+ }
+
+ /**
+ * Split region by categorical coordinate.
+ *
+ * @param regIdx Region index.
+ * @param coordIdx Coordinate index.
+ * @param cats Categories allowed for the left sub region.
+ * @return {@code this}.
+ */
+ public SplitDataGenerator<V> split(int regIdx, int coordIdx, int[] cats) {
+ BitSet subset = new BitSet();
+ Arrays.stream(cats).forEach(subset::set);
+ Region regToSplit = regs.get(regIdx);
+ CatCoordInfo cci = regToSplit.catCoords.get(coordIdx);
+
+ BitSet ssc = (BitSet)subset.clone();
+ BitSet set = cci.bs;
+ ssc.and(set);
+ if (ssc.length() != subset.length())
+ throw new MathIllegalArgumentException("Splitter set is not a subset of a parent subset.");
+
+ ssc.xor(set);
+ set.and(subset);
+
+ regToSplit.incTwoPow();
+ Region newReg = Utils.copy(regToSplit);
+ newReg.catCoords.put(coordIdx, new CatCoordInfo(ssc));
+
+ regs.add(regIdx + 1, newReg);
+
+ return this;
+ }
+
+ /**
+ * Get stream of points generated by this generator.
+ *
+ * @param ptsCnt Points count.
+ */
+ public Stream<IgniteBiTuple<Integer, V>> points(int ptsCnt, BiFunction<Double, Random, Double> f) {
+ regs.forEach(System.out::println);
+
+ return IntStream.range(0, regs.size()).
+ boxed().
+ map(i -> regs.get(i).generatePoints(ptsCnt, f.apply((double)i, rnd), boundsData, di, supplier, rnd).map(v -> new IgniteBiTuple<>(i, v))).flatMap(Function.identity());
+ }
+
+ /**
+ * Count of regions.
+ *
+ * @return Count of regions.
+ */
+ public int regsCount() {
+ return regs.size();
+ }
+
+ /**
+ * Get features count.
+ *
+ * @return Features count.
+ */
+ public int featuresCnt() {
+ return featCnt;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/VarianceSplitCalculatorTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/VarianceSplitCalculatorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/VarianceSplitCalculatorTest.java
new file mode 100644
index 0000000..d67cbc6
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/VarianceSplitCalculatorTest.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.VarianceSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+import org.junit.Test;
+
+/**
+ * Test for {@link VarianceSplitCalculator}.
+ */
+public class VarianceSplitCalculatorTest {
+ /** Test calculation of region info consisting of one point. */
+ @Test
+ public void testCalculateRegionInfoSimple() {
+ double labels[] = new double[] {0.0};
+
+ assert new VarianceSplitCalculator().calculateRegionInfo(DoubleStream.of(labels), 1).impurity() == 0.0;
+ }
+
+ /** Test calculation of region info consisting of two classes. */
+ @Test
+ public void testCalculateRegionInfoTwoClasses() {
+ double labels[] = new double[] {0.0, 1.0};
+
+ assert new VarianceSplitCalculator().calculateRegionInfo(DoubleStream.of(labels), 2).impurity() == 0.25;
+ }
+
+ /** Test calculation of region info consisting of three classes. */
+ @Test
+ public void testCalculateRegionInfoThreeClasses() {
+ double labels[] = new double[] {1.0, 2.0, 3.0};
+
+ assert Math.abs(new VarianceSplitCalculator().calculateRegionInfo(DoubleStream.of(labels), 3).impurity() - 2.0 / 3) < 1E-10;
+ }
+
+ /** Test calculation of split of region consisting of one point. */
+ @Test
+ public void testSplitSimple() {
+ double labels[] = new double[] {0.0};
+ double values[] = new double[] {0.0};
+ Integer[] samples = new Integer[] {0};
+
+ VarianceSplitCalculator.VarianceData data = new VarianceSplitCalculator.VarianceData(0.0, 1, 0.0);
+
+ assert new VarianceSplitCalculator().splitRegion(samples, values, labels, 0, data) == null;
+ }
+
+ /** Test calculation of split of region consisting of two classes. */
+ @Test
+ public void testSplitTwoClassesTwoPoints() {
+ double labels[] = new double[] {0.0, 1.0};
+ double values[] = new double[] {0.0, 1.0};
+ Integer[] samples = new Integer[] {0, 1};
+
+ VarianceSplitCalculator.VarianceData data = new VarianceSplitCalculator.VarianceData(0.25, 2, 0.5);
+
+ SplitInfo<VarianceSplitCalculator.VarianceData> split = new VarianceSplitCalculator().splitRegion(samples, values, labels, 0, data);
+
+ assert split.leftData().impurity() == 0;
+ assert split.leftData().mean() == 0;
+ assert split.leftData().getSize() == 1;
+
+ assert split.rightData().impurity() == 0;
+ assert split.rightData().mean() == 1;
+ assert split.rightData().getSize() == 1;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/java/org/apache/ignite/ml/trees/performance/ColumnDecisionTreeTrainerBenchmark.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/trees/performance/ColumnDecisionTreeTrainerBenchmark.java b/modules/ml/src/test/java/org/apache/ignite/ml/trees/performance/ColumnDecisionTreeTrainerBenchmark.java
new file mode 100644
index 0000000..4e7cc24
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/trees/performance/ColumnDecisionTreeTrainerBenchmark.java
@@ -0,0 +1,455 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.performance;
+
+import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.DoubleStream;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.IgniteDataStreamer;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.processors.cache.GridCacheProcessor;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.Model;
+import org.apache.ignite.ml.estimators.Estimators;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Tracer;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.distributed.keys.impl.SparseMatrixKey;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteTriFunction;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.storage.matrix.SparseDistributedMatrixStorage;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.ml.structures.LabeledVectorDouble;
+import org.apache.ignite.ml.trees.BaseDecisionTreeTest;
+import org.apache.ignite.ml.trees.SplitDataGenerator;
+import org.apache.ignite.ml.trees.models.DecisionTreeModel;
+import org.apache.ignite.ml.trees.trainers.columnbased.BiIndex;
+import org.apache.ignite.ml.trees.trainers.columnbased.BiIndexedCacheColumnDecisionTreeTrainerInput;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+import org.apache.ignite.ml.trees.trainers.columnbased.MatrixColumnDecisionTreeTrainerInput;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.ContextCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.ProjectionsCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.SplitCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.ContinuousSplitCalculators;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.GiniSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.VarianceSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.regcalcs.RegionCalculators;
+import org.apache.ignite.ml.util.MnistUtils;
+import org.apache.ignite.stream.StreamTransformer;
+import org.apache.ignite.testframework.junits.IgniteTestResources;
+import org.apache.log4j.Level;
+import org.junit.Assert;
+
+/**
+ * Various benchmarks for hand runs.
+ * <p>
+ * Methods prefixed with 'tst' are intended for manual execution only: rename them so they
+ * start with 'test' to run. MNIST-based benchmarks read the dataset locations from the
+ * {@code manualrun/trees/columntrees.manualrun.properties} resource.
+ */
+public class ColumnDecisionTreeTrainerBenchmark extends BaseDecisionTreeTest {
+    /** Name of the property specifying path to training set images. */
+    private static final String PROP_TRAINING_IMAGES = "mnist.training.images";
+
+    /** Name of property specifying path to training set labels. */
+    private static final String PROP_TRAINING_LABELS = "mnist.training.labels";
+
+    /** Name of property specifying path to test set images. */
+    private static final String PROP_TEST_IMAGES = "mnist.test.images";
+
+    /** Name of property specifying path to test set labels. */
+    private static final String PROP_TEST_LABELS = "mnist.test.labels";
+
+    /** Function to approximate. */
+    private static final Function<Vector, Double> f1 = v -> v.get(0) * v.get(0) + 2 * Math.sin(v.get(1)) + v.get(2);
+
+    /** {@inheritDoc} */
+    @Override protected long getTestTimeout() {
+        // Benchmarks are long-running; allow up to 100 minutes.
+        return 6000000;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName,
+        IgniteTestResources rsrcs) throws Exception {
+        IgniteConfiguration configuration = super.getConfiguration(igniteInstanceName, rsrcs);
+        // We do not need any extra event types.
+        configuration.setIncludeEventTypes();
+        configuration.setPeerClassLoadingEnabled(false);
+
+        resetLog4j(Level.INFO, false, GridCacheProcessor.class.getPackage().getName());
+
+        return configuration;
+    }
+
+    /**
+     * This test is for manual run only.
+     * To run this test rename this method so it starts from 'test'.
+     */
+    public void tstCacheMixed() {
+        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+        int ptsPerReg = 150;
+        int featCnt = 10;
+
+        // Feature 1 is categorical with 3 categories.
+        HashMap<Integer, Integer> catsInfo = new HashMap<>();
+        catsInfo.put(1, 3);
+
+        Random rnd = new Random(12349L);
+
+        SplitDataGenerator<DenseLocalOnHeapVector> gen = new SplitDataGenerator<>(
+            featCnt, catsInfo, () -> new DenseLocalOnHeapVector(featCnt + 1), rnd).
+            split(0, 1, new int[] {0, 2}).
+            split(1, 0, -10.0).
+            split(0, 0, 0.0);
+
+        testByGenStreamerLoad(ptsPerReg, catsInfo, gen, rnd);
+    }
+
+    /**
+     * Run decision tree classifier on MNIST using bi-indexed cache as a storage for dataset.
+     * To run this test rename this method so it starts from 'test'.
+     *
+     * @throws IOException In case of loading MNIST dataset errors.
+     */
+    public void tstMNISTBiIndexedCache() throws IOException {
+        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+        int ptsCnt = 40_000;
+        int featCnt = 28 * 28;
+
+        Properties props = loadMNISTProperties();
+
+        Stream<DenseLocalOnHeapVector> trainingMnistStream = MnistUtils.mnist(props.getProperty(PROP_TRAINING_IMAGES), props.getProperty(PROP_TRAINING_LABELS), new Random(123L), ptsCnt);
+        Stream<DenseLocalOnHeapVector> testMnistStream = MnistUtils.mnist(props.getProperty(PROP_TEST_IMAGES), props.getProperty(PROP_TEST_LABELS), new Random(123L), 10_000);
+
+        IgniteCache<BiIndex, Double> cache = createBiIndexedCache();
+
+        loadVectorsIntoBiIndexedCache(cache.getName(), trainingMnistStream.iterator(), featCnt + 1);
+
+        ColumnDecisionTreeTrainer<GiniSplitCalculator.GiniData> trainer =
+            new ColumnDecisionTreeTrainer<>(10, ContinuousSplitCalculators.GINI.apply(ignite), RegionCalculators.GINI, RegionCalculators.MOST_COMMON, ignite);
+
+        System.out.println(">>> Training started");
+        long before = System.currentTimeMillis();
+        DecisionTreeModel mdl = trainer.train(new BiIndexedCacheColumnDecisionTreeTrainerInput(cache, new HashMap<>(), ptsCnt, featCnt));
+        System.out.println(">>> Training finished in " + (System.currentTimeMillis() - before));
+
+        IgniteTriFunction<Model<Vector, Double>, Stream<IgniteBiTuple<Vector, Double>>, Function<Double, Double>, Double> mse = Estimators.errorsPercentage();
+        Double accuracy = mse.apply(mdl, testMnistStream.map(v -> new IgniteBiTuple<>(v.viewPart(0, featCnt), v.getX(featCnt))), Function.identity());
+        System.out.println(">>> Errs percentage: " + accuracy);
+
+        // Trainer must clean all its auxiliary caches up after training.
+        Assert.assertEquals(0, SplitCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, FeaturesCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, ContextCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, ProjectionsCache.getOrCreate(ignite).size());
+    }
+
+    /**
+     * Run decision tree classifier on MNIST using sparse distributed matrix as a storage for dataset.
+     * To run this test rename this method so it starts from 'test'.
+     *
+     * @throws IOException In case of loading MNIST dataset errors.
+     */
+    public void tstMNISTSparseDistributedMatrix() throws IOException {
+        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+        int ptsCnt = 30_000;
+        int featCnt = 28 * 28;
+
+        Properties props = loadMNISTProperties();
+
+        Stream<DenseLocalOnHeapVector> trainingMnistStream = MnistUtils.mnist(props.getProperty(PROP_TRAINING_IMAGES), props.getProperty(PROP_TRAINING_LABELS), new Random(123L), ptsCnt);
+        Stream<DenseLocalOnHeapVector> testMnistStream = MnistUtils.mnist(props.getProperty(PROP_TEST_IMAGES), props.getProperty(PROP_TEST_LABELS), new Random(123L), 10_000);
+
+        SparseDistributedMatrix m = new SparseDistributedMatrix(ptsCnt, featCnt + 1, StorageConstants.COLUMN_STORAGE_MODE, StorageConstants.RANDOM_ACCESS_MODE);
+
+        SparseDistributedMatrixStorage sto = (SparseDistributedMatrixStorage)m.getStorage();
+
+        loadVectorsIntoSparseDistributedMatrixCache(sto.cache().getName(), sto.getUUID(), trainingMnistStream.iterator(), featCnt + 1);
+
+        ColumnDecisionTreeTrainer<GiniSplitCalculator.GiniData> trainer =
+            new ColumnDecisionTreeTrainer<>(10, ContinuousSplitCalculators.GINI.apply(ignite), RegionCalculators.GINI, RegionCalculators.MOST_COMMON, ignite);
+
+        System.out.println(">>> Training started");
+        long before = System.currentTimeMillis();
+        DecisionTreeModel mdl = trainer.train(new MatrixColumnDecisionTreeTrainerInput(m, new HashMap<>()));
+        System.out.println(">>> Training finished in " + (System.currentTimeMillis() - before));
+
+        IgniteTriFunction<Model<Vector, Double>, Stream<IgniteBiTuple<Vector, Double>>, Function<Double, Double>, Double> mse = Estimators.errorsPercentage();
+        Double accuracy = mse.apply(mdl, testMnistStream.map(v -> new IgniteBiTuple<>(v.viewPart(0, featCnt), v.getX(featCnt))), Function.identity());
+        System.out.println(">>> Errs percentage: " + accuracy);
+
+        // Trainer must clean all its auxiliary caches up after training.
+        Assert.assertEquals(0, SplitCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, FeaturesCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, ContextCache.getOrCreate(ignite).size());
+        Assert.assertEquals(0, ProjectionsCache.getOrCreate(ignite).size());
+    }
+
+    /**
+     * Load properties for MNIST tests.
+     *
+     * @return Properties with paths to MNIST dataset parts.
+     * @throws IOException If the properties resource is missing or could not be read.
+     */
+    private static Properties loadMNISTProperties() throws IOException {
+        Properties res = new Properties();
+
+        // try-with-resources: the original code leaked the stream; also fail with a clear
+        // message instead of an NPE when the resource is absent.
+        try (InputStream is = ColumnDecisionTreeTrainerBenchmark.class.getClassLoader()
+            .getResourceAsStream("manualrun/trees/columntrees.manualrun.properties")) {
+            if (is == null)
+                throw new IOException("Resource 'manualrun/trees/columntrees.manualrun.properties' is not found.");
+
+            res.load(is);
+        }
+
+        return res;
+    }
+
+    /**
+     * Train a regression tree on data produced by the given generator (loaded through a data
+     * streamer) and assert that the model reproduces the label of each region.
+     *
+     * @param ptsPerReg Number of points per generated region.
+     * @param catsInfo Categorical features information (feature index -> categories count).
+     * @param gen Split data generator.
+     * @param rnd Random numbers generator.
+     */
+    private void testByGenStreamerLoad(int ptsPerReg, HashMap<Integer, Integer> catsInfo,
+        SplitDataGenerator<DenseLocalOnHeapVector> gen, Random rnd) {
+
+        List<IgniteBiTuple<Integer, DenseLocalOnHeapVector>> lst = gen.
+            points(ptsPerReg, (i, rn) -> i).
+            collect(Collectors.toList());
+
+        int featCnt = gen.featuresCnt();
+
+        Collections.shuffle(lst, rnd);
+
+        int numRegs = gen.regsCount();
+
+        SparseDistributedMatrix m = new SparseDistributedMatrix(numRegs * ptsPerReg, featCnt + 1, StorageConstants.COLUMN_STORAGE_MODE, StorageConstants.RANDOM_ACCESS_MODE);
+
+        IgniteFunction<DoubleStream, Double> regCalc = s -> s.average().orElse(0.0);
+
+        Map<Integer, List<LabeledVectorDouble>> byRegion = new HashMap<>();
+
+        SparseDistributedMatrixStorage sto = (SparseDistributedMatrixStorage)m.getStorage();
+        long before = System.currentTimeMillis();
+        System.out.println(">>> Batch loading started...");
+        loadVectorsIntoSparseDistributedMatrixCache(sto.cache().getName(), sto.getUUID(), gen.
+            points(ptsPerReg, (i, rn) -> i).map(IgniteBiTuple::get2).iterator(), featCnt + 1);
+        System.out.println(">>> Batch loading took " + (System.currentTimeMillis() - before) + " ms.");
+
+        for (IgniteBiTuple<Integer, DenseLocalOnHeapVector> bt : lst) {
+            byRegion.putIfAbsent(bt.get1(), new LinkedList<>());
+            byRegion.get(bt.get1()).add(asLabeledVector(bt.get2().getStorage().data()));
+        }
+
+        ColumnDecisionTreeTrainer<VarianceSplitCalculator.VarianceData> trainer =
+            new ColumnDecisionTreeTrainer<>(2, ContinuousSplitCalculators.VARIANCE, RegionCalculators.VARIANCE, regCalc, ignite);
+
+        before = System.currentTimeMillis();
+        DecisionTreeModel mdl = trainer.train(new MatrixColumnDecisionTreeTrainerInput(m, catsInfo));
+
+        System.out.println(">>> Took time(ms): " + (System.currentTimeMillis() - before));
+
+        byRegion.keySet().forEach(k -> {
+            LabeledVectorDouble sp = byRegion.get(k).get(0);
+            Tracer.showAscii(sp.vector());
+            System.out.println("Prediction: " + mdl.predict(sp.vector()) + " label: " + sp.doubleLabel());
+            // Labels are small integer region indices stored as doubles and the leaf value is
+            // the average of identical labels, so exact '==' comparison is intended here.
+            assert mdl.predict(sp.vector()) == sp.doubleLabel();
+        });
+    }
+
+    /**
+     * Test decision tree regression.
+     * To run this test rename this method so it starts from 'test'.
+     */
+    public void tstF1() {
+        IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+        int ptsCnt = 10000;
+        Map<Integer, double[]> ranges = new HashMap<>();
+
+        ranges.put(0, new double[] {-100.0, 100.0});
+        ranges.put(1, new double[] {-100.0, 100.0});
+        ranges.put(2, new double[] {-100.0, 100.0});
+
+        int featCnt = 100;
+        double[] defRng = {-1.0, 1.0};
+
+        Vector[] trainVectors = vecsFromRanges(ranges, featCnt, defRng, new Random(123L), ptsCnt, f1);
+
+        SparseDistributedMatrix m = new SparseDistributedMatrix(ptsCnt, featCnt + 1, StorageConstants.COLUMN_STORAGE_MODE, StorageConstants.RANDOM_ACCESS_MODE);
+
+        SparseDistributedMatrixStorage sto = (SparseDistributedMatrixStorage)m.getStorage();
+
+        loadVectorsIntoSparseDistributedMatrixCache(sto.cache().getName(), sto.getUUID(), Arrays.stream(trainVectors).iterator(), featCnt + 1);
+
+        IgniteFunction<DoubleStream, Double> regCalc = s -> s.average().orElse(0.0);
+
+        ColumnDecisionTreeTrainer<VarianceSplitCalculator.VarianceData> trainer =
+            new ColumnDecisionTreeTrainer<>(10, ContinuousSplitCalculators.VARIANCE, RegionCalculators.VARIANCE, regCalc, ignite);
+
+        System.out.println(">>> Training started");
+        long before = System.currentTimeMillis();
+        DecisionTreeModel mdl = trainer.train(new MatrixColumnDecisionTreeTrainerInput(m, new HashMap<>()));
+        System.out.println(">>> Training finished in " + (System.currentTimeMillis() - before));
+
+        Vector[] testVectors = vecsFromRanges(ranges, featCnt, defRng, new Random(123L), 20, f1);
+
+        IgniteTriFunction<Model<Vector, Double>, Stream<IgniteBiTuple<Vector, Double>>, Function<Double, Double>, Double> mse = Estimators.MSE();
+        Double accuracy = mse.apply(mdl, Arrays.stream(testVectors).map(v -> new IgniteBiTuple<>(v.viewPart(0, featCnt), v.getX(featCnt))), Function.identity());
+        System.out.println(">>> MSE: " + accuracy);
+    }
+
+    /**
+     * Load vectors into sparse distributed matrix.
+     *
+     * @param cacheName Name of cache where matrix is stored.
+     * @param uuid UUID of matrix.
+     * @param iter Iterator over vectors.
+     * @param vectorSize size of vectors.
+     */
+    private void loadVectorsIntoSparseDistributedMatrixCache(String cacheName, UUID uuid,
+        Iterator<? extends org.apache.ignite.ml.math.Vector> iter, int vectorSize) {
+        try (IgniteDataStreamer<SparseMatrixKey, Map<Integer, Double>> streamer =
+                 Ignition.localIgnite().dataStreamer(cacheName)) {
+            int sampleIdx = 0;
+            streamer.allowOverwrite(true);
+
+            // Merge each incoming column fragment into the value already stored in the cache.
+            streamer.receiver(StreamTransformer.from((e, arg) -> {
+                Map<Integer, Double> val = e.getValue();
+
+                if (val == null)
+                    val = new Int2DoubleOpenHashMap();
+
+                val.putAll((Map<Integer, Double>)arg[0]);
+
+                e.setValue(val);
+
+                return null;
+            }));
+
+            // Feature index -> (sample index -> value)
+            Map<Integer, Map<Integer, Double>> batch = new HashMap<>();
+            IntStream.range(0, vectorSize).forEach(i -> batch.put(i, new HashMap<>()));
+            int batchSize = 1000;
+
+            while (iter.hasNext()) {
+                org.apache.ignite.ml.math.Vector next = iter.next();
+
+                for (int i = 0; i < vectorSize; i++)
+                    batch.get(i).put(sampleIdx, next.getX(i));
+
+                sampleIdx++;
+
+                // Flush full batches. The check is done AFTER the increment: the original code
+                // checked before it, which flushed a degenerate 1-sample batch at index 0 and,
+                // worse, silently dropped the last batchSize-1 samples whenever the total count
+                // was an exact multiple of batchSize (the remainder check below then skipped).
+                if (sampleIdx % batchSize == 0) {
+                    batch.keySet().forEach(fi -> streamer.addData(new SparseMatrixKey(fi, uuid, fi), batch.get(fi)));
+                    IntStream.range(0, vectorSize).forEach(i -> batch.put(i, new HashMap<>()));
+
+                    // Progress report once per batch (the original printed every sample index).
+                    System.out.println(">>> Loaded " + sampleIdx + " vectors.");
+                }
+            }
+            // Flush the remaining incomplete batch, if any.
+            if (sampleIdx % batchSize != 0)
+                batch.keySet().forEach(fi -> streamer.addData(new SparseMatrixKey(fi, uuid, fi), batch.get(fi)));
+        }
+    }
+
+    /**
+     * Load vectors into bi-indexed cache.
+     *
+     * @param cacheName Name of cache.
+     * @param iter Iterator over vectors.
+     * @param vectorSize size of vectors.
+     */
+    private void loadVectorsIntoBiIndexedCache(String cacheName,
+        Iterator<? extends org.apache.ignite.ml.math.Vector> iter, int vectorSize) {
+        try (IgniteDataStreamer<BiIndex, Double> streamer =
+                 Ignition.localIgnite().dataStreamer(cacheName)) {
+            int sampleIdx = 0;
+
+            streamer.perNodeBufferSize(10000);
+
+            while (iter.hasNext()) {
+                org.apache.ignite.ml.math.Vector next = iter.next();
+
+                for (int i = 0; i < vectorSize; i++)
+                    streamer.addData(new BiIndex(sampleIdx, i), next.getX(i));
+
+                sampleIdx++;
+
+                if (sampleIdx % 1000 == 0)
+                    System.out.println(">>> Loaded " + sampleIdx + " vectors.");
+            }
+        }
+    }
+
+    /**
+     * Create bi-indexed cache for tests.
+     *
+     * @return Bi-indexed cache.
+     */
+    private IgniteCache<BiIndex, Double> createBiIndexedCache() {
+        CacheConfiguration<BiIndex, Double> cfg = new CacheConfiguration<>();
+
+        // Write to primary.
+        cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+        // Atomic transactions only.
+        cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+        // No eviction.
+        cfg.setEvictionPolicy(null);
+
+        // No copying of values.
+        cfg.setCopyOnRead(false);
+
+        // Cache is partitioned.
+        cfg.setCacheMode(CacheMode.PARTITIONED);
+
+        cfg.setBackups(0);
+
+        cfg.setName("TMP_BI_INDEXED_CACHE");
+
+        return Ignition.localIgnite().getOrCreateCache(cfg);
+    }
+
+    /**
+     * Generate random labeled points whose features are uniform in the given per-feature ranges.
+     *
+     * @param ranges Feature index -> {from, to} range; features without an entry use {@code defRng}.
+     * @param featCnt Number of features.
+     * @param defRng Default range for features not listed in {@code ranges}.
+     * @param rnd Random numbers generator.
+     * @param ptsCnt Number of points to generate.
+     * @param f Labeling function; its value is stored in the last vector component.
+     * @return Array of generated labeled vectors of size featCnt + 1.
+     */
+    private Vector[] vecsFromRanges(Map<Integer, double[]> ranges, int featCnt, double[] defRng, Random rnd, int ptsCnt,
+        Function<Vector, Double> f) {
+        int vs = featCnt + 1;
+        DenseLocalOnHeapVector[] res = new DenseLocalOnHeapVector[ptsCnt];
+        for (int pt = 0; pt < ptsCnt; pt++) {
+            DenseLocalOnHeapVector v = new DenseLocalOnHeapVector(vs);
+            for (int i = 0; i < featCnt; i++) {
+                double[] range = ranges.getOrDefault(i, defRng);
+                double from = range[0];
+                double to = range[1];
+                double rng = to - from;
+
+                // Shift by 'from' so values are uniform in [from, to). The original code omitted
+                // the offset, producing values in [0, to - from) and ignoring negative bounds.
+                v.setX(i, from + rnd.nextDouble() * rng);
+            }
+            v.setX(featCnt, f.apply(v));
+            res[pt] = v;
+        }
+
+        return res;
+    }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties b/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
new file mode 100644
index 0000000..7040010
--- /dev/null
+++ b/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
@@ -0,0 +1,5 @@
+# Paths to mnist dataset parts.
+mnist.training.images=/path/to/train-images-idx3-ubyte
+mnist.training.labels=/path/to/train-labels-idx1-ubyte
+mnist.test.images=/path/to/t10k-images-idx3-ubyte
+mnist.test.labels=/path/to/t10k-labels-idx1-ubyte
\ No newline at end of file
[13/28] ignite git commit: IGNITE-6669: Renamed test by code style.
Posted by sb...@apache.org.
IGNITE-6669: Renamed test by code style.
Signed-off-by: nikolay_tikhonov <nt...@gridgain.com>
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/c11fc411
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/c11fc411
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/c11fc411
Branch: refs/heads/ignite-zk
Commit: c11fc41176d7fb5c731ddc55f5f544ded4b26d02
Parents: 291b166
Author: Slava Koptilin <sl...@gmail.com>
Authored: Fri Nov 10 13:07:26 2017 +0300
Committer: nikolay_tikhonov <nt...@gridgain.com>
Committed: Fri Nov 10 13:07:26 2017 +0300
----------------------------------------------------------------------
...oreListenerRWThroughDisabledAtomicCache.java | 33 --
...istenerRWThroughDisabledAtomicCacheTest.java | 33 ++
...enerRWThroughDisabledTransactionalCache.java | 138 ---------
...RWThroughDisabledTransactionalCacheTest.java | 138 +++++++++
...SessionListenerReadWriteThroughDisabled.java | 291 ------------------
...ionListenerReadWriteThroughDisabledTest.java | 291 ++++++++++++++++++
...eStoreSessionListenerWriteBehindEnabled.java | 304 -------------------
...reSessionListenerWriteBehindEnabledTest.java | 304 +++++++++++++++++++
.../testsuites/IgniteCacheTestSuite4.java | 12 +-
9 files changed, 772 insertions(+), 772 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
deleted file mode 100644
index 9b59940..0000000
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.cache.store;
-
-import org.apache.ignite.cache.CacheAtomicityMode;
-
-import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
-
-/**
- * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
- */
-public class CacheStoreListenerRWThroughDisabledAtomicCache extends CacheStoreSessionListenerReadWriteThroughDisabled {
- /** {@inheritDoc} */
- @Override protected CacheAtomicityMode atomicityMode() {
- return ATOMIC;
- }
-}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCacheTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCacheTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCacheTest.java
new file mode 100644
index 0000000..6e28a52
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCacheTest.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import org.apache.ignite.cache.CacheAtomicityMode;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ * <p>
+ * Variant of the parent scenarios that runs against an {@link CacheAtomicityMode#ATOMIC} cache.
+ */
+public class CacheStoreListenerRWThroughDisabledAtomicCacheTest extends CacheStoreSessionListenerReadWriteThroughDisabledTest {
+ /** {@inheritDoc} */
+ @Override protected CacheAtomicityMode atomicityMode() {
+ return ATOMIC;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
deleted file mode 100644
index 6502c97..0000000
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.cache.store;
-
-import java.util.Random;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.cache.CacheAtomicityMode;
-import org.apache.ignite.transactions.Transaction;
-import org.apache.ignite.transactions.TransactionConcurrency;
-import org.apache.ignite.transactions.TransactionIsolation;
-
-import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
-import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
-import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
-import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED;
-import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
-import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE;
-
-/**
- * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
- */
-public class CacheStoreListenerRWThroughDisabledTransactionalCache extends CacheStoreSessionListenerReadWriteThroughDisabled {
- /** {@inheritDoc} */
- @Override protected CacheAtomicityMode atomicityMode() {
- return TRANSACTIONAL;
- }
-
- /**
- * Tests {@link IgniteCache#get(Object)} with disabled read-through and write-through modes.
- */
- public void testTransactionalLookup() {
- testTransactionalLookup(OPTIMISTIC, READ_COMMITTED);
- testTransactionalLookup(OPTIMISTIC, REPEATABLE_READ);
- testTransactionalLookup(OPTIMISTIC, SERIALIZABLE);
-
- testTransactionalLookup(PESSIMISTIC, READ_COMMITTED);
- testTransactionalLookup(PESSIMISTIC, REPEATABLE_READ);
- testTransactionalLookup(PESSIMISTIC, SERIALIZABLE);
- }
-
- /**
- * @param concurrency Transaction concurrency level.
- * @param isolation Transaction isolation level.
- */
- private void testTransactionalLookup(TransactionConcurrency concurrency, TransactionIsolation isolation) {
- IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
- for (int i = 0; i < CNT; ++i)
- cache.get(r.nextInt());
-
- tx.commit();
- }
- }
-
- /**
- * Tests {@link IgniteCache#put(Object, Object)} with disabled read-through and write-through modes.
- */
- public void testTransactionalUpdate() {
- testTransactionalUpdate(OPTIMISTIC, READ_COMMITTED);
- testTransactionalUpdate(OPTIMISTIC, REPEATABLE_READ);
- testTransactionalUpdate(OPTIMISTIC, SERIALIZABLE);
-
- testTransactionalUpdate(PESSIMISTIC, READ_COMMITTED);
- testTransactionalUpdate(PESSIMISTIC, REPEATABLE_READ);
- testTransactionalUpdate(PESSIMISTIC, SERIALIZABLE);
- }
-
- /**
- * @param concurrency Transaction concurrency level.
- * @param isolation Transaction isolation level.
- */
- private void testTransactionalUpdate(TransactionConcurrency concurrency, TransactionIsolation isolation) {
- IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
- for (int i = 0; i < CNT; ++i)
- cache.put(r.nextInt(), "test-value");
-
- tx.commit();
- }
- }
-
- /**
- * Tests {@link IgniteCache#remove(Object)} with disabled read-through and write-through modes.
- */
- public void testTransactionalRemove() {
- testTransactionalRemove(OPTIMISTIC, READ_COMMITTED);
- testTransactionalRemove(OPTIMISTIC, REPEATABLE_READ);
- testTransactionalRemove(OPTIMISTIC, SERIALIZABLE);
-
- testTransactionalRemove(PESSIMISTIC, READ_COMMITTED);
- testTransactionalRemove(PESSIMISTIC, REPEATABLE_READ);
- testTransactionalRemove(PESSIMISTIC, SERIALIZABLE);
- }
-
- /**
- * @param concurrency Transaction concurrency level.
- * @param isolation Transaction isolation level.
- */
- private void testTransactionalRemove(TransactionConcurrency concurrency, TransactionIsolation isolation) {
- IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
- for (int i = 0; i < CNT; ++i) {
- int key = r.nextInt();
-
- cache.put(key, "test-value");
-
- cache.remove(key, "test-value");
- }
-
- tx.commit();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCacheTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCacheTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCacheTest.java
new file mode 100644
index 0000000..fd784a3
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCacheTest.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.util.Random;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.transactions.Transaction;
+import org.apache.ignite.transactions.TransactionConcurrency;
+import org.apache.ignite.transactions.TransactionIsolation;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
+import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
+import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
+import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED;
+import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
+import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ */
+public class CacheStoreListenerRWThroughDisabledTransactionalCacheTest extends CacheStoreSessionListenerReadWriteThroughDisabledTest {
+ /** {@inheritDoc} */
+ @Override protected CacheAtomicityMode atomicityMode() {
+ return TRANSACTIONAL;
+ }
+
+ /**
+ * Tests {@link IgniteCache#get(Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalLookup() {
+ testTransactionalLookup(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalLookup(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalLookup(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalLookup(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalLookup(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalLookup(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalLookup(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i)
+ cache.get(r.nextInt());
+
+ tx.commit();
+ }
+ }
+
+ /**
+ * Tests {@link IgniteCache#put(Object, Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalUpdate() {
+ testTransactionalUpdate(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalUpdate(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalUpdate(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalUpdate(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalUpdate(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalUpdate(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalUpdate(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i)
+ cache.put(r.nextInt(), "test-value");
+
+ tx.commit();
+ }
+ }
+
+ /**
+ * Tests {@link IgniteCache#remove(Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalRemove() {
+ testTransactionalRemove(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalRemove(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalRemove(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalRemove(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalRemove(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalRemove(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalRemove(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ cache.remove(key, "test-value");
+ }
+
+ tx.commit();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
deleted file mode 100644
index 1f6e97d..0000000
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.cache.store;
-
-import java.io.PrintWriter;
-import java.io.Serializable;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.logging.Logger;
-import javax.cache.Cache;
-import javax.cache.configuration.Factory;
-import javax.cache.configuration.FactoryBuilder;
-import javax.cache.integration.CacheLoaderException;
-import javax.cache.integration.CacheWriterException;
-import javax.sql.DataSource;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.configuration.NearCacheConfiguration;
-import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
-
-/**
- * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
- */
-public abstract class CacheStoreSessionListenerReadWriteThroughDisabled extends GridCacheAbstractSelfTest {
- /** {@inheritDoc} */
- protected int gridCount() {
- return 2;
- }
-
- /** */
- protected final int CNT = 100;
-
- /** {@inheritDoc} */
- protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
- CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
-
- cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
-
- cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
-
- cacheCfg.setReadThrough(false);
- cacheCfg.setWriteThrough(false);
-
- cacheCfg.setBackups(0);
-
- return cacheCfg;
- }
-
- /** {@inheritDoc} */
- protected NearCacheConfiguration nearConfiguration() {
- return null;
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#get(Object)} performed.
- *
- * @throws Exception If failed.
- */
- public void testLookup() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- for (int i = 0; i < CNT; ++i)
- cache.get(r.nextInt());
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#getAll(Set)} performed.
- *
- * @throws Exception If failed.
- */
- public void testBatchLookup() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- Set<Object> values = new HashSet<>();
-
- for (int i = 0; i < CNT; ++i)
- values.add(r.nextInt());
-
- cache.getAll(values);
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#put(Object, Object)} performed.
- *
- * @throws Exception If failed.
- */
- public void testUpdate() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- for (int i = 0; i < CNT; ++i)
- cache.put(r.nextInt(), "test-value");
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#putAll(Map)} performed.
- *
- * @throws Exception If failed.
- */
- public void testBatchUpdate() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- Map<Object, Object> values = new TreeMap<>();
-
- for (int i = 0; i < CNT; ++i)
- values.put(r.nextInt(), "test-value");
-
- cache.putAll(values);
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#remove(Object)} performed.
- *
- * @throws Exception If failed.
- */
- public void testRemove() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- for (int i = 0; i < CNT; ++i) {
- int key = r.nextInt();
-
- cache.put(key, "test-value");
-
- cache.remove(key);
- }
- }
-
- /**
- * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
- * {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#removeAll(Set)} performed.
- *
- * @throws Exception If failed.
- */
- public void testBatchRemove() throws Exception {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- Random r = new Random();
-
- Set<Object> values = new HashSet<>();
-
- for (int i = 0; i < CNT; ++i) {
- int key = r.nextInt();
-
- cache.put(key, "test-value");
-
- values.add(key);
- }
-
- cache.removeAll(values);
- }
-
- /**
- * Cache store session factory.
- */
- public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
- /** {@inheritDoc} */
- @Override public TestCacheStoreSessionListener create() {
- TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
- lsnr.setDataSource(new DataSourceStub());
- return lsnr;
- }
- }
-
- /**
- * Test cache store session listener.
- */
- public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
- /** {@inheritDoc} */
- @Override public void onSessionStart(CacheStoreSession ses) {
- fail("TestCacheStoreSessionListener.onSessionStart(CacheStoreSession) should not be called.");
- }
-
- /** {@inheritDoc} */
- @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
- fail("TestCacheStoreSessionListener.onSessionEnd(CacheStoreSession, boolean) should not be called.");
- }
- }
-
- /** Empty cache store implementation. All overridden methods should not be called while the test is running. */
- public static class EmptyCacheStore extends CacheStoreAdapter {
- /** {@inheritDoc} */
- @Override public Object load(Object key) throws CacheLoaderException {
- fail("EmptyCacheStore.load(Object) should not be called.");
-
- return null;
- }
-
- /** {@inheritDoc} */
- @Override public void write(Cache.Entry entry) throws CacheWriterException {
- fail("EmptyCacheStore.write(Cache.Entry) should not be called.");
- }
-
- /** {@inheritDoc} */
- @Override public void delete(Object key) throws CacheWriterException {
- fail("EmptyCacheStore.delete(Object) should not be called.");
- }
- }
-
- /**
- * Data source stub which should not be called.
- */
- public static class DataSourceStub implements DataSource, Serializable {
- /** {@inheritDoc} */
- @Override public Connection getConnection() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public Connection getConnection(String username, String password) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public <T> T unwrap(Class<T> iface) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public PrintWriter getLogWriter() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public void setLogWriter(PrintWriter out) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public void setLoginTimeout(int seconds) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public int getLoginTimeout() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
- throw new UnsupportedOperationException();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabledTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabledTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabledTest.java
new file mode 100644
index 0000000..150e157
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabledTest.java
@@ -0,0 +1,291 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.logging.Logger;
+import javax.cache.Cache;
+import javax.cache.configuration.Factory;
+import javax.cache.configuration.FactoryBuilder;
+import javax.cache.integration.CacheLoaderException;
+import javax.cache.integration.CacheWriterException;
+import javax.sql.DataSource;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.NearCacheConfiguration;
+import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ */
+public abstract class CacheStoreSessionListenerReadWriteThroughDisabledTest extends GridCacheAbstractSelfTest {
+ /** {@inheritDoc} */
+ protected int gridCount() {
+ return 2;
+ }
+
+ /** */
+ protected final int CNT = 100;
+
+ /** {@inheritDoc} */
+ protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
+ CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
+
+ cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
+
+ cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
+
+ cacheCfg.setReadThrough(false);
+ cacheCfg.setWriteThrough(false);
+
+ cacheCfg.setBackups(0);
+
+ return cacheCfg;
+ }
+
+ /** {@inheritDoc} */
+ protected NearCacheConfiguration nearConfiguration() {
+ return null;
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#get(Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testLookup() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i)
+ cache.get(r.nextInt());
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#getAll(Set)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchLookup() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Set<Object> values = new HashSet<>();
+
+ for (int i = 0; i < CNT; ++i)
+ values.add(r.nextInt());
+
+ cache.getAll(values);
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#put(Object, Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testUpdate() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i)
+ cache.put(r.nextInt(), "test-value");
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#putAll(Map)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchUpdate() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Map<Object, Object> values = new TreeMap<>();
+
+ for (int i = 0; i < CNT; ++i)
+ values.put(r.nextInt(), "test-value");
+
+ cache.putAll(values);
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#remove(Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testRemove() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ cache.remove(key);
+ }
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#removeAll(Set)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchRemove() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Set<Object> values = new HashSet<>();
+
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ values.add(key);
+ }
+
+ cache.removeAll(values);
+ }
+
+ /**
+ * Cache store session factory.
+ */
+ public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
+ /** {@inheritDoc} */
+ @Override public TestCacheStoreSessionListener create() {
+ TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
+ lsnr.setDataSource(new DataSourceStub());
+ return lsnr;
+ }
+ }
+
+ /**
+ * Test cache store session listener.
+ */
+ public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
+ /** {@inheritDoc} */
+ @Override public void onSessionStart(CacheStoreSession ses) {
+ fail("TestCacheStoreSessionListener.onSessionStart(CacheStoreSession) should not be called.");
+ }
+
+ /** {@inheritDoc} */
+ @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
+ fail("TestCacheStoreSessionListener.onSessionEnd(CacheStoreSession, boolean) should not be called.");
+ }
+ }
+
+ /** Empty cache store implementation. All overridden methods should not be called while the test is running. */
+ public static class EmptyCacheStore extends CacheStoreAdapter {
+ /** {@inheritDoc} */
+ @Override public Object load(Object key) throws CacheLoaderException {
+ fail("EmptyCacheStore.load(Object) should not be called.");
+
+ return null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void write(Cache.Entry entry) throws CacheWriterException {
+ fail("EmptyCacheStore.write(Cache.Entry) should not be called.");
+ }
+
+ /** {@inheritDoc} */
+ @Override public void delete(Object key) throws CacheWriterException {
+ fail("EmptyCacheStore.delete(Object) should not be called.");
+ }
+ }
+
+ /**
+ * Data source stub which should not be called.
+ */
+ public static class DataSourceStub implements DataSource, Serializable {
+ /** {@inheritDoc} */
+ @Override public Connection getConnection() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Connection getConnection(String username, String password) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public PrintWriter getLogWriter() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLogWriter(PrintWriter out) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLoginTimeout(int seconds) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public int getLoginTimeout() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ throw new UnsupportedOperationException();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
deleted file mode 100644
index fbb881e..0000000
--- a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.cache.store;
-
-import java.io.PrintWriter;
-import java.io.Serializable;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Logger;
-import javax.cache.Cache;
-import javax.cache.configuration.Factory;
-import javax.cache.configuration.FactoryBuilder;
-import javax.cache.integration.CacheLoaderException;
-import javax.cache.integration.CacheWriterException;
-import javax.sql.DataSource;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.IgniteException;
-import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
-import org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore;
-import org.apache.ignite.resources.IgniteInstanceResource;
-
-/**
- * This class tests that calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are executed from
- * {@link GridCacheWriteBehindStore} only.
- */
-public class CacheStoreSessionListenerWriteBehindEnabled extends GridCacheAbstractSelfTest {
- /** */
- protected final static int CNT = 100;
-
- /** */
- private final static int WRITE_BEHIND_FLUSH_FREQUENCY = 1000;
-
- /** */
- private static final List<OperationType> operations = Collections.synchronizedList(new ArrayList<OperationType>());
-
- /** */
- private static final AtomicInteger entryCnt = new AtomicInteger();
-
- /** {@inheritDoc} */
- @Override protected int gridCount() {
- return 1;
- }
-
- /** {@inheritDoc} */
- @Override protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
- CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
-
- cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
-
- cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
-
- cacheCfg.setReadThrough(true);
- cacheCfg.setWriteThrough(true);
-
- cacheCfg.setWriteBehindEnabled(true);
- cacheCfg.setWriteBehindBatchSize(CNT * 2);
- cacheCfg.setWriteBehindFlushFrequency(WRITE_BEHIND_FLUSH_FREQUENCY);
-
- cacheCfg.setBackups(0);
-
- return cacheCfg;
- }
-
- /** {@inheritDoc} */
- @Override protected void beforeTest() throws Exception {
- super.beforeTest();
-
- operations.clear();
-
- entryCnt.set(0);
- }
-
- /**
- * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#get(Object)} performed.
- */
- public void testLookup() {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- for (int i = 0; i < CNT; ++i)
- cache.get(i);
-
- checkSessionCounters(CNT);
- }
-
- /**
- * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#put(Object, Object)} performed.
- */
- public void testUpdate() {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- for (int i = 0; i < CNT; ++i)
- cache.put(i, i);
-
- checkSessionCounters(1);
- }
-
- /**
- * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
- * while {@link IgniteCache#remove(Object)} performed.
- */
- public void testRemove() {
- IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
-
- for (int i = 0; i < CNT; ++i) {
- cache.remove(i);
- }
-
- checkSessionCounters(1);
- }
-
- /**
- * @param startedSessions Number of expected sessions.
- */
- private void checkSessionCounters(int startedSessions) {
- try {
- // Wait for GridCacheWriteBehindStore
- Thread.sleep(WRITE_BEHIND_FLUSH_FREQUENCY * 4);
-
- assertEquals(CNT, entryCnt.get());
-
- checkOpCount(operations, OperationType.SESSION_START, startedSessions);
-
- checkOpCount(operations, OperationType.SESSION_END, startedSessions);
- }
- catch (InterruptedException e) {
- throw new IgniteException("Failed to wait for the GridCacheWriteBehindStore due to interruption.", e);
- }
- }
-
- /**
- * @param operations List of {@link OperationType}.
- * @param op Operation.
- * @param expected Expected number of operations for the given {@code op}.
- */
- private void checkOpCount(List<OperationType> operations, OperationType op, int expected) {
- int n = 0;
-
- for (OperationType o : operations) {
- if (op.equals(o))
- ++n;
- }
-
- assertEquals("Operation=" + op.name(), expected, n);
- }
-
- /**
- * Operation type.
- */
- public enum OperationType {
- /**
- * Cache store session started.
- */
- SESSION_START,
-
- /**
- * Cache store session ended.
- */
- SESSION_END,
- }
-
- /**
- * Cache store session factory.
- */
- public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
- /** {@inheritDoc} */
- @Override public TestCacheStoreSessionListener create() {
- TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
- lsnr.setDataSource(new DataSourceStub());
- return lsnr;
- }
- }
-
- /**
- * Test cache store session listener.
- */
- public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
- /** */
- @IgniteInstanceResource
- private Ignite ignite;
-
- /** {@inheritDoc} */
- @Override public void onSessionStart(CacheStoreSession ses) {
- operations.add(OperationType.SESSION_START);
- }
-
- /** {@inheritDoc} */
- @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
- operations.add(OperationType.SESSION_END);
- }
- }
-
- /**
- * Test cache store.
- *
- * {@link EmptyCacheStore#writeAll(Collection)} and {@link EmptyCacheStore#deleteAll(Collection)} should be called
- * by {@link GridCacheWriteBehindStore}.
- */
- public static class EmptyCacheStore extends CacheStoreAdapter<Object, Object> {
- /** */
- @IgniteInstanceResource
- private Ignite ignite;
-
- /** {@inheritDoc} */
- @Override public Object load(Object key) throws CacheLoaderException {
- entryCnt.getAndIncrement();
- return null;
- }
-
- /** {@inheritDoc} */
- @Override public void writeAll(Collection<Cache.Entry<?, ?>> entries) {
- entryCnt.addAndGet(entries.size());
- }
-
- /** {@inheritDoc} */
- @Override public void write(Cache.Entry entry) throws CacheWriterException {
- }
-
- /** {@inheritDoc} */
- @Override public void deleteAll(Collection<?> keys) {
- entryCnt.addAndGet(keys.size());
- }
-
- /** {@inheritDoc} */
- @Override public void delete(Object key) throws CacheWriterException {
- }
- }
-
- /**
- * Data source stub which should not be called.
- */
- public static class DataSourceStub implements DataSource, Serializable {
- /** {@inheritDoc} */
- @Override public Connection getConnection() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public Connection getConnection(String username, String password) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public <T> T unwrap(Class<T> iface) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public PrintWriter getLogWriter() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public void setLogWriter(PrintWriter out) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public void setLoginTimeout(int seconds) throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public int getLoginTimeout() throws SQLException {
- throw new UnsupportedOperationException();
- }
-
- /** {@inheritDoc} */
- @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
- throw new UnsupportedOperationException();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabledTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabledTest.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabledTest.java
new file mode 100644
index 0000000..b9095d0
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabledTest.java
@@ -0,0 +1,304 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Logger;
+import javax.cache.Cache;
+import javax.cache.configuration.Factory;
+import javax.cache.configuration.FactoryBuilder;
+import javax.cache.integration.CacheLoaderException;
+import javax.cache.integration.CacheWriterException;
+import javax.sql.DataSource;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
+import org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore;
+import org.apache.ignite.resources.IgniteInstanceResource;
+
+/**
+ * This class tests that calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are executed from
+ * {@link GridCacheWriteBehindStore} only.
+ */
+public class CacheStoreSessionListenerWriteBehindEnabledTest extends GridCacheAbstractSelfTest {
+ /** */
+ protected final static int CNT = 100;
+
+ /** */
+ private final static int WRITE_BEHIND_FLUSH_FREQUENCY = 1000;
+
+ /** */
+ private static final List<OperationType> operations = Collections.synchronizedList(new ArrayList<OperationType>());
+
+ /** */
+ private static final AtomicInteger entryCnt = new AtomicInteger();
+
+ /** {@inheritDoc} */
+ @Override protected int gridCount() {
+ return 1;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
+ CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
+
+ cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
+
+ cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
+
+ cacheCfg.setReadThrough(true);
+ cacheCfg.setWriteThrough(true);
+
+ cacheCfg.setWriteBehindEnabled(true);
+ cacheCfg.setWriteBehindBatchSize(CNT * 2);
+ cacheCfg.setWriteBehindFlushFrequency(WRITE_BEHIND_FLUSH_FREQUENCY);
+
+ cacheCfg.setBackups(0);
+
+ return cacheCfg;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTest() throws Exception {
+ super.beforeTest();
+
+ operations.clear();
+
+ entryCnt.set(0);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#get(Object)} performed.
+ */
+ public void testLookup() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i)
+ cache.get(i);
+
+ checkSessionCounters(CNT);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#put(Object, Object)} performed.
+ */
+ public void testUpdate() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i)
+ cache.put(i, i);
+
+ checkSessionCounters(1);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#remove(Object)} performed.
+ */
+ public void testRemove() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i) {
+ cache.remove(i);
+ }
+
+ checkSessionCounters(1);
+ }
+
+ /**
+ * @param startedSessions Number of expected sessions.
+ */
+ private void checkSessionCounters(int startedSessions) {
+ try {
+ // Wait for GridCacheWriteBehindStore
+ Thread.sleep(WRITE_BEHIND_FLUSH_FREQUENCY * 4);
+
+ assertEquals(CNT, entryCnt.get());
+
+ checkOpCount(operations, OperationType.SESSION_START, startedSessions);
+
+ checkOpCount(operations, OperationType.SESSION_END, startedSessions);
+ }
+ catch (InterruptedException e) {
+ throw new IgniteException("Failed to wait for the GridCacheWriteBehindStore due to interruption.", e);
+ }
+ }
+
+ /**
+ * @param operations List of {@link OperationType}.
+ * @param op Operation.
+ * @param expected Expected number of operations for the given {@code op}.
+ */
+ private void checkOpCount(List<OperationType> operations, OperationType op, int expected) {
+ int n = 0;
+
+ for (OperationType o : operations) {
+ if (op.equals(o))
+ ++n;
+ }
+
+ assertEquals("Operation=" + op.name(), expected, n);
+ }
+
+ /**
+ * Operation type.
+ */
+ public enum OperationType {
+ /**
+ * Cache store session started.
+ */
+ SESSION_START,
+
+ /**
+ * Cache store session ended.
+ */
+ SESSION_END,
+ }
+
+ /**
+ * Cache store session factory.
+ */
+ public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
+ /** {@inheritDoc} */
+ @Override public TestCacheStoreSessionListener create() {
+ TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
+ lsnr.setDataSource(new DataSourceStub());
+ return lsnr;
+ }
+ }
+
+ /**
+ * Test cache store session listener.
+ */
+ public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
+ /** */
+ @IgniteInstanceResource
+ private Ignite ignite;
+
+ /** {@inheritDoc} */
+ @Override public void onSessionStart(CacheStoreSession ses) {
+ operations.add(OperationType.SESSION_START);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
+ operations.add(OperationType.SESSION_END);
+ }
+ }
+
+ /**
+ * Test cache store.
+ *
+ * {@link EmptyCacheStore#writeAll(Collection)} and {@link EmptyCacheStore#deleteAll(Collection)} should be called
+ * by {@link GridCacheWriteBehindStore}.
+ */
+ public static class EmptyCacheStore extends CacheStoreAdapter<Object, Object> {
+ /** */
+ @IgniteInstanceResource
+ private Ignite ignite;
+
+ /** {@inheritDoc} */
+ @Override public Object load(Object key) throws CacheLoaderException {
+ entryCnt.getAndIncrement();
+ return null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeAll(Collection<Cache.Entry<?, ?>> entries) {
+ entryCnt.addAndGet(entries.size());
+ }
+
+ /** {@inheritDoc} */
+ @Override public void write(Cache.Entry entry) throws CacheWriterException {
+ }
+
+ /** {@inheritDoc} */
+ @Override public void deleteAll(Collection<?> keys) {
+ entryCnt.addAndGet(keys.size());
+ }
+
+ /** {@inheritDoc} */
+ @Override public void delete(Object key) throws CacheWriterException {
+ }
+ }
+
+ /**
+ * Data source stub which should not be called.
+ */
+ public static class DataSourceStub implements DataSource, Serializable {
+ /** {@inheritDoc} */
+ @Override public Connection getConnection() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Connection getConnection(String username, String password) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public PrintWriter getLogWriter() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLogWriter(PrintWriter out) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLoginTimeout(int seconds) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public int getLoginTimeout() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ throw new UnsupportedOperationException();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/c11fc411/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
index e4930e0..ba7aa1b 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
@@ -18,9 +18,9 @@
package org.apache.ignite.testsuites;
import junit.framework.TestSuite;
-import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledAtomicCache;
-import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledTransactionalCache;
-import org.apache.ignite.cache.store.CacheStoreSessionListenerWriteBehindEnabled;
+import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledAtomicCacheTest;
+import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledTransactionalCacheTest;
+import org.apache.ignite.cache.store.CacheStoreSessionListenerWriteBehindEnabledTest;
import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListenerSelfTest;
import org.apache.ignite.internal.processors.GridCacheTxLoadFromStoreOnLockSelfTest;
import org.apache.ignite.internal.processors.cache.CacheClientStoreSelfTest;
@@ -279,9 +279,9 @@ public class IgniteCacheTestSuite4 extends TestSuite {
suite.addTestSuite(CacheOffheapMapEntrySelfTest.class);
suite.addTestSuite(CacheJdbcStoreSessionListenerSelfTest.class);
- suite.addTestSuite(CacheStoreListenerRWThroughDisabledAtomicCache.class);
- suite.addTestSuite(CacheStoreListenerRWThroughDisabledTransactionalCache.class);
- suite.addTestSuite(CacheStoreSessionListenerWriteBehindEnabled.class);
+ suite.addTestSuite(CacheStoreListenerRWThroughDisabledAtomicCacheTest.class);
+ suite.addTestSuite(CacheStoreListenerRWThroughDisabledTransactionalCacheTest.class);
+ suite.addTestSuite(CacheStoreSessionListenerWriteBehindEnabledTest.class);
suite.addTestSuite(CacheClientStoreSelfTest.class);
suite.addTestSuite(CacheStoreUsageMultinodeStaticStartAtomicTest.class);
[21/28] ignite git commit: IGNITE-5218: Added missed licenses headers.
Posted by sb...@apache.org.
IGNITE-5218: Added missed licenses headers.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/131d80d1
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/131d80d1
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/131d80d1
Branch: refs/heads/ignite-zk
Commit: 131d80d1cb24e4078798791b91b13fd4229534a8
Parents: db7697b
Author: Artem Malykh <am...@gridgain.com>
Authored: Sat Nov 11 14:04:57 2017 +0300
Committer: Tikhonov Nikolay <ti...@gmail.com>
Committed: Sat Nov 11 14:04:57 2017 +0300
----------------------------------------------------------------------
.../columnbased/vectors/ContinuousSplitInfo.java | 17 +++++++++++++++++
.../trees/columntrees.manualrun.properties | 17 +++++++++++++++++
2 files changed, 34 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/131d80d1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
index d6f2847..8b45cb5 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/vectors/ContinuousSplitInfo.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.ignite.ml.trees.trainers.columnbased.vectors;
import org.apache.ignite.ml.trees.RegionInfo;
http://git-wip-us.apache.org/repos/asf/ignite/blob/131d80d1/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties b/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
index 7040010..57a6b23 100644
--- a/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
+++ b/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
# Paths to mnist dataset parts.
mnist.training.images=/path/to/train-images-idx3-ubyte
mnist.training.labels=/path/to/train-labels-idx1-ubyte
[16/28] ignite git commit: ignite-6669 Added eviction policy factory
to cache configuration.
Posted by sb...@apache.org.
ignite-6669 Added eviction policy factory to cache configuration.
Signed-off-by: Andrey Gura <ag...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/6579e69f
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/6579e69f
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/6579e69f
Branch: refs/heads/ignite-zk
Commit: 6579e69f20197567af43b14f72bb3a8852849353
Parents: 954e47b
Author: Andrey V. Mashenkov <an...@gmail.com>
Authored: Fri Nov 10 17:33:52 2017 +0300
Committer: Andrey Gura <ag...@apache.org>
Committed: Fri Nov 10 17:33:52 2017 +0300
----------------------------------------------------------------------
.../java/org/apache/ignite/cache/CacheMode.java | 2 +-
.../eviction/AbstractEvictionPolicyFactory.java | 104 ++
.../fifo/FifoEvictionPolicyFactory.java | 72 ++
.../eviction/lru/LruEvictionPolicyFactory.java | 72 ++
.../sorted/SortedEvictionPolicyFactory.java | 98 ++
.../configuration/CacheConfiguration.java | 42 +-
.../configuration/NearCacheConfiguration.java | 37 +
.../processors/cache/ClusterCachesInfo.java | 7 +
.../processors/cache/GridCacheAttributes.java | 19 +
.../cache/GridCacheEvictionManager.java | 15 +-
.../processors/cache/GridCacheProcessor.java | 19 +-
.../processors/cache/GridCacheUtils.java | 1 +
.../processors/igfs/IgfsHelperImpl.java | 8 +-
.../internal/processors/igfs/IgfsImpl.java | 4 +-
.../cache/VisorCacheNearConfiguration.java | 1 +
...idCacheConfigurationConsistencySelfTest.java | 52 +
.../GridCacheNearEvictionEventSelfTest.java | 5 -
.../EvictionPolicyFactoryAbstractTest.java | 1073 ++++++++++++++++++
.../fifo/FifoEvictionPolicyFactorySelfTest.java | 261 +++++
.../lru/LruEvictionPolicyFactorySelfTest.java | 352 ++++++
.../SortedEvictionPolicyFactorySelfTest.java | 264 +++++
.../IgniteCacheEvictionSelfTestSuite.java | 6 +
.../ApiParity/CacheConfigurationParityTest.cs | 3 +-
.../org/apache/ignite/yardstick/IgniteNode.java | 6 +-
24 files changed, 2502 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/cache/CacheMode.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/CacheMode.java b/modules/core/src/main/java/org/apache/ignite/cache/CacheMode.java
index e75fa0c..4171b1a 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/CacheMode.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/CacheMode.java
@@ -55,7 +55,7 @@ public enum CacheMode {
* <p>
* Note that partitioned cache is always fronted by local
* {@code 'near'} cache which stores most recent data. You
- * can configure the size of near cache via {@link NearCacheConfiguration#getNearEvictionPolicy()}
+ * can configure the size of near cache via {@link NearCacheConfiguration#getNearEvictionPolicyFactory()}
* configuration property.
*/
PARTITIONED;
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/cache/eviction/AbstractEvictionPolicyFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/eviction/AbstractEvictionPolicyFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/eviction/AbstractEvictionPolicyFactory.java
new file mode 100644
index 0000000..012c7ee
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/eviction/AbstractEvictionPolicyFactory.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.eviction;
+
+import javax.cache.configuration.Factory;
+import org.apache.ignite.internal.util.typedef.internal.A;
+
+/**
+ * Common functionality implementation for eviction policies factories.
+ */
+public abstract class AbstractEvictionPolicyFactory<T> implements Factory<T> {
+ /** */
+ private int maxSize;
+
+ /** */
+ private int batchSize = 1;
+
+ /** */
+ private long maxMemSize;
+
+ /**
+ * Sets maximum allowed size of cache before entry will start getting evicted.
+ *
+ * @param max Maximum allowed size of cache before entry will start getting evicted.
+ * @return {@code this} for chaining.
+ */
+ public AbstractEvictionPolicyFactory setMaxSize(int max) {
+ A.ensure(max >= 0, "max >= 0");
+
+ this.maxSize = max;
+
+ return this;
+ }
+
+ /**
+ * Gets maximum allowed size of cache before entry will start getting evicted.
+ *
+ * @return Maximum allowed size of cache before entry will start getting evicted.
+ */
+ public int getMaxSize() {
+ return maxSize;
+ }
+
+ /**
+ * Sets batch size.
+ *
+ * @param batchSize Batch size.
+ * @return {@code this} for chaining.
+ */
+ public AbstractEvictionPolicyFactory setBatchSize(int batchSize) {
+ A.ensure(batchSize > 0, "batchSize > 0");
+
+ this.batchSize = batchSize;
+
+ return this;
+ }
+
+ /**
+ * Gets batch size.
+ *
+ * @return batch size.
+ */
+ public int getBatchSize() {
+ return batchSize;
+ }
+
+ /**
+ * Sets maximum allowed cache size in bytes.
+ *
+ * @return {@code this} for chaining.
+ */
+ public AbstractEvictionPolicyFactory setMaxMemorySize(long maxMemSize) {
+ A.ensure(maxMemSize >= 0, "maxMemSize >= 0");
+
+ this.maxMemSize = maxMemSize;
+
+ return this;
+ }
+
+ /**
+ * Gets maximum allowed cache size in bytes.
+ *
+ * @return maximum allowed cache size in bytes.
+ */
+ public long getMaxMemorySize() {
+ return maxMemSize;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/cache/eviction/fifo/FifoEvictionPolicyFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/eviction/fifo/FifoEvictionPolicyFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/eviction/fifo/FifoEvictionPolicyFactory.java
new file mode 100644
index 0000000..856865a
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/eviction/fifo/FifoEvictionPolicyFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.eviction.fifo;
+
+import org.apache.ignite.cache.eviction.AbstractEvictionPolicyFactory;
+
+/**
+ * Factory class for {@link FifoEvictionPolicy}.
+ *
+ * Creates a cache eviction policy based on the {@code First In First Out (FIFO)} algorithm and supports batch eviction.
+ * <p>
+ * The eviction starts in the following cases:
+ * <ul>
+ * <li>The cache size becomes {@code batchSize} elements greater than the maximum size.</li>
+ * <li>
+ * The size of cache entries in bytes becomes greater than the maximum memory size.
+ * The size of cache entry calculates as sum of key size and value size.
+ * </li>
+ * </ul>
+ * <b>Note:</b> Batch eviction is enabled only if the maximum memory limit isn't set ({@code maxMemSize == 0}).
+ * {@code batchSize} elements will be evicted in this case. The default {@code batchSize} value is {@code 1}.
+ * <p>
+ * {@link FifoEvictionPolicy} implementation is very efficient since it does not create any additional
+ * table-like data structures. The {@code FIFO} ordering information is
+ * maintained by attaching ordering metadata to cache entries.
+ */
+public class FifoEvictionPolicyFactory<K, V> extends AbstractEvictionPolicyFactory<FifoEvictionPolicy<K, V>> {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** Constructor. */
+ public FifoEvictionPolicyFactory() {
+ }
+
+ /** Constructor. */
+ public FifoEvictionPolicyFactory(int maxSize) {
+ setMaxSize(maxSize);
+ }
+
+ /** */
+ public FifoEvictionPolicyFactory(int maxSize, int batchSize, long maxMemSize) {
+ setMaxSize(maxSize);
+ setBatchSize(batchSize);
+ setMaxMemorySize(maxMemSize);
+ }
+
+ /** {@inheritDoc} */
+ @Override public FifoEvictionPolicy<K, V> create() {
+ FifoEvictionPolicy<K, V> policy = new FifoEvictionPolicy<>();
+
+ policy.setBatchSize(getBatchSize());
+ policy.setMaxMemorySize(getMaxMemorySize());
+ policy.setMaxSize(getMaxSize());
+
+ return policy;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/cache/eviction/lru/LruEvictionPolicyFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/eviction/lru/LruEvictionPolicyFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/eviction/lru/LruEvictionPolicyFactory.java
new file mode 100644
index 0000000..8f7fbc5
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/eviction/lru/LruEvictionPolicyFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.eviction.lru;
+
+import org.apache.ignite.cache.eviction.AbstractEvictionPolicyFactory;
+
+/**
+ * Factory class for {@link LruEvictionPolicy}.
+ *
+ * Creates a cache eviction policy based on the {@code Least Recently Used (LRU)} algorithm and supports batch eviction.
+ * <p>
+ * The eviction starts in the following cases:
+ * <ul>
+ * <li>The cache size becomes {@code batchSize} elements greater than the maximum size.</li>
+ * <li>
+ * The size of cache entries in bytes becomes greater than the maximum memory size.
+ * The size of cache entry calculates as sum of key size and value size.
+ * </li>
+ * </ul>
+ * <b>Note:</b> Batch eviction is enabled only if the maximum memory limit isn't set ({@code maxMemSize == 0}).
+ * {@code batchSize} elements will be evicted in this case. The default {@code batchSize} value is {@code 1}.
+ * <p>
+ * {@link LruEvictionPolicy} implementation is very efficient since it is lock-free and does not create any additional table-like
+ * data structures. The {@code LRU} ordering information is maintained by attaching ordering metadata to cache entries.
+ */
+public class LruEvictionPolicyFactory<K, V> extends AbstractEvictionPolicyFactory<LruEvictionPolicy<K, V>> {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** */
+ public LruEvictionPolicyFactory() {
+ }
+
+ /** */
+ public LruEvictionPolicyFactory(int maxSize) {
+ setMaxSize(maxSize);
+ }
+
+ /** */
+ public LruEvictionPolicyFactory(int maxSize, int batchSize, long maxMemSize) {
+ setMaxSize(maxSize);
+ setBatchSize(batchSize);
+ setMaxMemorySize(maxMemSize);
+ }
+
+ /** {@inheritDoc} */
+ @Override public LruEvictionPolicy<K, V> create() {
+ LruEvictionPolicy<K, V> policy = new LruEvictionPolicy<>();
+
+ policy.setBatchSize(getBatchSize());
+ policy.setMaxMemorySize(getMaxMemorySize());
+ policy.setMaxSize(getMaxSize());
+
+ return policy;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/cache/eviction/sorted/SortedEvictionPolicyFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/eviction/sorted/SortedEvictionPolicyFactory.java b/modules/core/src/main/java/org/apache/ignite/cache/eviction/sorted/SortedEvictionPolicyFactory.java
new file mode 100644
index 0000000..a88c277
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/cache/eviction/sorted/SortedEvictionPolicyFactory.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.eviction.sorted;
+
+import java.io.Serializable;
+import java.util.Comparator;
+import org.apache.ignite.cache.eviction.AbstractEvictionPolicyFactory;
+import org.apache.ignite.cache.eviction.EvictableEntry;
+
+/**
+ * Factory class for {@link SortedEvictionPolicy}.
+ *
+ * Creates a cache eviction policy which will select the minimum cache entry for eviction.
+ * <p>
+ * The eviction starts in the following cases:
+ * <ul>
+ * <li>The cache size becomes {@code batchSize} elements greater than the maximum size.</li>
+ * <li>
+ * The size of cache entries in bytes becomes greater than the maximum memory size.
+ * The size of cache entry calculates as sum of key size and value size.
+ * </li>
+ * </ul>
+ * <b>Note:</b> Batch eviction is enabled only if the maximum memory limit isn't set ({@code maxMemSize == 0}).
+ * {@code batchSize} elements will be evicted in this case. The default {@code batchSize} value is {@code 1}.
+ * <p>
+ * Entries comparison based on {@link Comparator} instance if provided.
+ * The default {@code Comparator} behaviour is to use cache entry keys for comparison, which imposes a requirement for keys
+ * to implement {@link Comparable} interface.
+ * <p>
+ * User defined comparator should implement {@link Serializable} interface.
+ */
+public class SortedEvictionPolicyFactory<K,V> extends AbstractEvictionPolicyFactory<SortedEvictionPolicy<K, V>> {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** Comparator. */
+ private Comparator<EvictableEntry<K, V>> comp;
+
+ /** */
+ public SortedEvictionPolicyFactory() {
+ }
+
+ /** */
+ public SortedEvictionPolicyFactory(int maxSize) {
+ setMaxSize(maxSize);
+ }
+
+ /** */
+ public SortedEvictionPolicyFactory(int maxSize, int batchSize, long maxMemSize) {
+ setMaxSize(maxSize);
+ setBatchSize(batchSize);
+ setMaxMemorySize(maxMemSize);
+ }
+
+ /**
+ * Gets entries comparator.
+ * @return entry comparator.
+ */
+ public Comparator<EvictableEntry<K, V>> getComp() {
+ return comp;
+ }
+
+ /**
+ * Sets entries comparator.
+ *
+ * @param comp entry comparator.
+ */
+ public void setComp(Comparator<EvictableEntry<K, V>> comp) {
+ this.comp = comp;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SortedEvictionPolicy<K, V> create() {
+ SortedEvictionPolicy<K, V> policy = new SortedEvictionPolicy<>(comp);
+
+ policy.setBatchSize(getBatchSize());
+ policy.setMaxMemorySize(getMaxMemorySize());
+ policy.setMaxSize(getMaxSize());
+
+ return policy;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
index 807c817..aeb1b2f 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/CacheConfiguration.java
@@ -200,9 +200,13 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
/** Rebalance timeout. */
private long rebalanceTimeout = DFLT_REBALANCE_TIMEOUT;
- /** Cache expiration policy. */
+ /** Cache eviction policy. */
+ @Deprecated
private EvictionPolicy evictPlc;
+ /** Cache eviction policy factory. */
+ private Factory evictPlcFactory;
+
/** */
private boolean onheapCache;
@@ -395,6 +399,7 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
eagerTtl = cc.isEagerTtl();
evictFilter = cc.getEvictionFilter();
evictPlc = cc.getEvictionPolicy();
+ evictPlcFactory = cc.getEvictionPolicyFactory();
expiryPolicyFactory = cc.getExpiryPolicyFactory();
grpName = cc.getGroupName();
indexedTypes = cc.getIndexedTypes();
@@ -553,7 +558,10 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
* which means that evictions are disabled for cache.
*
* @return Cache eviction policy or {@code null} if evictions should be disabled.
+ *
+ * @deprecated Use {@link #getEvictionPolicyFactory()} instead.
*/
+ @Deprecated
@SuppressWarnings({"unchecked"})
@Nullable public EvictionPolicy<K, V> getEvictionPolicy() {
return evictPlc;
@@ -562,9 +570,12 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
/**
* Sets cache eviction policy.
*
- * @param evictPlc Cache expiration policy.
+ * @param evictPlc Cache eviction policy.
* @return {@code this} for chaining.
+ *
+ * @deprecated Use {@link #setEvictionPolicyFactory(Factory)} instead.
*/
+ @Deprecated
public CacheConfiguration<K, V> setEvictionPolicy(@Nullable EvictionPolicy evictPlc) {
this.evictPlc = evictPlc;
@@ -572,6 +583,31 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
}
/**
+ * Gets cache eviction policy factory. By default, returns {@code null}
+ * which means that evictions are disabled for cache.
+ *
+ * @return Cache eviction policy factory or {@code null} if evictions should be disabled
+ * or if {@link #getEvictionPolicy()} should be used instead.
+ */
+ @Nullable public Factory<EvictionPolicy<? super K, ? super V>> getEvictionPolicyFactory() {
+ return evictPlcFactory;
+ }
+
+ /**
+ * Sets cache eviction policy factory.
+ * Note: Eviction policy factory should be {@link Serializable}.
+ *
+ * @param evictPlcFactory Cache eviction policy factory.
+ * @return {@code this} for chaining.
+ */
+ public CacheConfiguration<K, V> setEvictionPolicyFactory(
+ @Nullable Factory<? extends EvictionPolicy<? super K, ? super V>> evictPlcFactory) {
+ this.evictPlcFactory = evictPlcFactory;
+
+ return this;
+ }
+
+ /**
* Checks if the on-heap cache is enabled for the off-heap based page memory.
*
* @return On-heap cache enabled flag.
@@ -664,7 +700,7 @@ public class CacheConfiguration<K, V> extends MutableConfiguration<K, V> {
* never be evicted.
* <p>
* If not provided, any entry may be evicted depending on
- * {@link #getEvictionPolicy() eviction policy} configuration.
+ * {@link #getEvictionPolicyFactory() eviction policy} configuration.
*
* @return Eviction filter or {@code null}.
*/
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java
index 7b3022c..ae6e98b 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java
@@ -18,9 +18,11 @@
package org.apache.ignite.configuration;
import java.io.Serializable;
+import javax.cache.configuration.Factory;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.eviction.EvictionPolicy;
import org.apache.ignite.internal.util.typedef.internal.S;
+import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.configuration.CacheConfiguration.DFLT_NEAR_START_SIZE;
@@ -37,8 +39,12 @@ public class NearCacheConfiguration<K, V> implements Serializable {
private static final long serialVersionUID = 0L;
/** Near cache eviction policy. */
+ @Deprecated
private EvictionPolicy<K, V> nearEvictPlc;
+ /** Near cache eviction policy factory. */
+ private Factory nearEvictPlcFactory;
+
/** Default near cache start size. */
private int nearStartSize = DFLT_NEAR_START_SIZE;
@@ -55,6 +61,7 @@ public class NearCacheConfiguration<K, V> implements Serializable {
* @param ccfg Configuration to copy.
*/
public NearCacheConfiguration(NearCacheConfiguration<K, V> ccfg) {
+ nearEvictPlcFactory = ccfg.getNearEvictionPolicyFactory();
nearEvictPlc = ccfg.getNearEvictionPolicy();
nearStartSize = ccfg.getNearStartSize();
}
@@ -65,7 +72,10 @@ public class NearCacheConfiguration<K, V> implements Serializable {
*
* @return Near eviction policy.
* @see CacheConfiguration#getEvictionPolicy()
+ *
+ * @deprecated Use {@link #getNearEvictionPolicyFactory()} instead.
*/
+ @Deprecated
public EvictionPolicy<K, V> getNearEvictionPolicy() {
return nearEvictPlc;
}
@@ -75,7 +85,10 @@ public class NearCacheConfiguration<K, V> implements Serializable {
*
* @param nearEvictPlc Near eviction policy.
* @return {@code this} for chaining.
+ *
+ * @deprecated Use {@link #setNearEvictionPolicyFactory(Factory)} instead.
*/
+ @Deprecated
public NearCacheConfiguration<K, V> setNearEvictionPolicy(EvictionPolicy<K, V> nearEvictPlc) {
this.nearEvictPlc = nearEvictPlc;
@@ -83,6 +96,30 @@ public class NearCacheConfiguration<K, V> implements Serializable {
}
/**
+ * Gets cache eviction policy factory. By default, returns {@code null}
+ * which means that evictions are disabled for cache.
+ *
+ * @return Cache eviction policy factory or {@code null} if evictions should be disabled.
+ */
+ @Nullable public Factory<EvictionPolicy<? super K, ? super V>> getNearEvictionPolicyFactory() {
+ return nearEvictPlcFactory;
+ }
+
+ /**
+ * Sets cache eviction policy factory.
+ * Note: Eviction policy factory should be {@link Serializable}.
+ *
+ * @param nearEvictPlcFactory Cache eviction policy factory.
+ * @return {@code this} for chaining.
+ */
+ public NearCacheConfiguration<K, V> setNearEvictionPolicyFactory(
+ @Nullable Factory<? extends EvictionPolicy<? super K, ? super V>> nearEvictPlcFactory) {
+ this.nearEvictPlcFactory = nearEvictPlcFactory;
+
+ return this;
+ }
+
+ /**
* Gets initial cache size for near cache which will be used to pre-create internal
* hash table after start. Default value is defined by {@link CacheConfiguration#DFLT_NEAR_START_SIZE}.
*
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/ClusterCachesInfo.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/ClusterCachesInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/ClusterCachesInfo.java
index 8382821..69f1a27 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/ClusterCachesInfo.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/ClusterCachesInfo.java
@@ -275,6 +275,9 @@ class ClusterCachesInfo {
CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "evictionPolicy", "Eviction policy",
locAttr.evictionPolicyClassName(), rmtAttr.evictionPolicyClassName(), true);
+ CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "evictionPolicyFactory", "Eviction policy factory",
+ locAttr.evictionPolicyFactoryClassName(), rmtAttr.evictionPolicyFactoryClassName(), true);
+
CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "transactionManagerLookup",
"Transaction manager lookup", locAttr.transactionManagerLookupClassName(),
rmtAttr.transactionManagerLookupClassName(), false);
@@ -333,6 +336,10 @@ class ClusterCachesInfo {
"Near eviction policy", locAttr.nearEvictionPolicyClassName(),
rmtAttr.nearEvictionPolicyClassName(), false);
+ CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "nearEvictionPolicyFactory",
+ "Near eviction policy factory", locAttr.nearEvictionPolicyFactoryClassName(),
+ rmtAttr.nearEvictionPolicyFactoryClassName(), false);
+
CU.checkAttributeMismatch(log, rmtAttr.cacheName(), rmt, "affinityIncludeNeighbors",
"Affinity include neighbors", locAttr.affinityIncludeNeighbors(),
rmtAttr.affinityIncludeNeighbors(), true);
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAttributes.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAttributes.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAttributes.java
index d64ee8b..faad1ec 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAttributes.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheAttributes.java
@@ -154,13 +154,25 @@ public class GridCacheAttributes implements Serializable {
/**
* @return Eviction policy class name.
+ *
+ * @deprecated Use evictionPolicyFactoryClassName() instead.
*/
+ @Deprecated
public String evictionPolicyClassName() {
return className(ccfg.getEvictionPolicy());
}
/**
+ * @return Eviction policy factory class name.
+ */
+ public String evictionPolicyFactoryClassName() {
+ return className(ccfg.getEvictionPolicyFactory());
+ }
+
+ /**
* @return Near eviction policy class name.
+ *
+ * @deprecated Use nearEvictionPolicyFactoryClassName() instead.
*/
public String nearEvictionPolicyClassName() {
NearCacheConfiguration nearCfg = ccfg.getNearConfiguration();
@@ -172,6 +184,13 @@ public class GridCacheAttributes implements Serializable {
}
/**
+ * @return Near eviction policy factory class name.
+ */
+ public String nearEvictionPolicyFactoryClassName() {
+ return ccfg.getNearConfiguration() == null ? null : className(ccfg.getNearConfiguration().getNearEvictionPolicyFactory());
+ }
+
+ /**
* @return Store class name.
*/
public String storeFactoryClassName() {
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEvictionManager.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEvictionManager.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEvictionManager.java
index 7735f74..084b235 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEvictionManager.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEvictionManager.java
@@ -60,7 +60,15 @@ public class GridCacheEvictionManager extends GridCacheManagerAdapter implements
@Override public void start0() throws IgniteCheckedException {
CacheConfiguration cfg = cctx.config();
- plc = cctx.isNear() ? cfg.getNearConfiguration().getNearEvictionPolicy() : cfg.getEvictionPolicy();
+ if (cctx.isNear()) {
+ plc = (cfg.getNearConfiguration().getNearEvictionPolicyFactory() != null) ?
+ (EvictionPolicy)cfg.getNearConfiguration().getNearEvictionPolicyFactory().create() :
+ cfg.getNearConfiguration().getNearEvictionPolicy();
+ }
+ else if (cfg.getEvictionPolicyFactory() != null)
+ plc = (EvictionPolicy)cfg.getEvictionPolicyFactory().create();
+ else
+ plc = cfg.getEvictionPolicy();
plcEnabled = plc != null;
@@ -298,4 +306,9 @@ public class GridCacheEvictionManager extends GridCacheManagerAdapter implements
X.println(">>> Eviction manager memory stats [igniteInstanceName=" + cctx.igniteInstanceName() +
", cache=" + cctx.name() + ']');
}
+
+ /** For test purposes. */
+ public EvictionPolicy getEvictionPolicy() {
+ return plc;
+ }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
index dc599c7..59d170c 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheProcessor.java
@@ -255,7 +255,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
String msg = "Disable eviction policy (remove from configuration)";
- if (cfg.getEvictionPolicy() != null)
+ if (cfg.getEvictionPolicyFactory() != null || cfg.getEvictionPolicy() != null)
perf.add(msg, false);
else
perf.add(msg, true);
@@ -470,7 +470,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
assertParameter(cc.getTransactionManagerLookupClassName() == null,
"transaction manager can not be used with ATOMIC cache");
- if (cc.getEvictionPolicy() != null && !cc.isOnheapCacheEnabled())
+ if ((cc.getEvictionPolicyFactory() != null || cc.getEvictionPolicy() != null) && !cc.isOnheapCacheEnabled())
throw new IgniteCheckedException("Onheap cache must be enabled if eviction policy is configured [cacheName="
+ U.maskName(cc.getName()) + "]");
@@ -511,6 +511,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
* @throws IgniteCheckedException If failed to inject.
*/
private void prepare(CacheConfiguration cfg, Collection<Object> objs) throws IgniteCheckedException {
+ prepare(cfg, cfg.getEvictionPolicyFactory(), false);
prepare(cfg, cfg.getEvictionPolicy(), false);
prepare(cfg, cfg.getAffinity(), false);
prepare(cfg, cfg.getAffinityMapper(), false);
@@ -519,8 +520,10 @@ public class GridCacheProcessor extends GridProcessorAdapter {
NearCacheConfiguration nearCfg = cfg.getNearConfiguration();
- if (nearCfg != null)
+ if (nearCfg != null) {
+ prepare(cfg, nearCfg.getNearEvictionPolicyFactory(), true);
prepare(cfg, nearCfg.getNearEvictionPolicy(), true);
+ }
for (Object obj : objs)
prepare(cfg, obj, false);
@@ -548,6 +551,7 @@ public class GridCacheProcessor extends GridProcessorAdapter {
private void cleanup(GridCacheContext cctx) {
CacheConfiguration cfg = cctx.config();
+ cleanup(cfg, cfg.getEvictionPolicyFactory(), false);
cleanup(cfg, cfg.getEvictionPolicy(), false);
cleanup(cfg, cfg.getAffinity(), false);
cleanup(cfg, cfg.getAffinityMapper(), false);
@@ -562,8 +566,10 @@ public class GridCacheProcessor extends GridProcessorAdapter {
NearCacheConfiguration nearCfg = cfg.getNearConfiguration();
- if (nearCfg != null)
+ if (nearCfg != null) {
+ cleanup(cfg, nearCfg.getNearEvictionPolicyFactory(), true);
cleanup(cfg, nearCfg.getNearEvictionPolicy(), true);
+ }
cctx.cleanup();
}
@@ -3721,13 +3727,16 @@ public class GridCacheProcessor extends GridProcessorAdapter {
ret.add(ccfg.getAffinityMapper());
ret.add(ccfg.getEvictionFilter());
+ ret.add(ccfg.getEvictionPolicyFactory());
ret.add(ccfg.getEvictionPolicy());
ret.add(ccfg.getInterceptor());
NearCacheConfiguration nearCfg = ccfg.getNearConfiguration();
- if (nearCfg != null)
+ if (nearCfg != null) {
+ ret.add(nearCfg.getNearEvictionPolicyFactory());
ret.add(nearCfg.getNearEvictionPolicy());
+ }
Collections.addAll(ret, objs);
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java
index 53fb4d3..248f2aa 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java
@@ -1046,6 +1046,7 @@ public class GridCacheUtils {
cache.setAtomicityMode(TRANSACTIONAL);
cache.setWriteSynchronizationMode(FULL_SYNC);
+ cache.setEvictionPolicyFactory(null);
cache.setEvictionPolicy(null);
cache.setCacheStoreFactory(null);
cache.setNodeFilter(CacheConfiguration.ALL_NODES);
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsHelperImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsHelperImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsHelperImpl.java
index 29e75a5..f20b787 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsHelperImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsHelperImpl.java
@@ -31,7 +31,9 @@ import org.apache.ignite.internal.util.typedef.internal.U;
public class IgfsHelperImpl implements IgfsHelper {
/** {@inheritDoc} */
@Override public void preProcessCacheConfiguration(CacheConfiguration cfg) {
- EvictionPolicy evictPlc = cfg.getEvictionPolicy();
+ EvictionPolicy evictPlc = cfg.getEvictionPolicyFactory() != null ?
+ (EvictionPolicy)cfg.getEvictionPolicyFactory().create()
+ : cfg.getEvictionPolicy();
if (evictPlc instanceof IgfsPerBlockLruEvictionPolicy && cfg.getEvictionFilter() == null)
cfg.setEvictionFilter(new IgfsEvictionFilter());
@@ -39,7 +41,9 @@ public class IgfsHelperImpl implements IgfsHelper {
/** {@inheritDoc} */
@Override public void validateCacheConfiguration(CacheConfiguration cfg) throws IgniteCheckedException {
- EvictionPolicy evictPlc = cfg.getEvictionPolicy();
+ EvictionPolicy evictPlc = cfg.getEvictionPolicyFactory() != null ?
+ (EvictionPolicy)cfg.getEvictionPolicyFactory().create()
+ : cfg.getEvictionPolicy();
if (evictPlc != null && evictPlc instanceof IgfsPerBlockLruEvictionPolicy) {
EvictionFilter evictFilter = cfg.getEvictionFilter();
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
index 5808e7c..5a9e10a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
@@ -237,7 +237,9 @@ public final class IgfsImpl implements IgfsEx {
for (CacheConfiguration cacheCfg : igfsCtx.kernalContext().config().getCacheConfiguration()) {
if (F.eq(dataCacheName, cacheCfg.getName())) {
- EvictionPolicy evictPlc = cacheCfg.getEvictionPolicy();
+ EvictionPolicy evictPlc = cacheCfg.getEvictionPolicyFactory() != null ?
+ (EvictionPolicy)cacheCfg.getEvictionPolicyFactory().create()
+ : cacheCfg.getEvictionPolicy();
if (evictPlc != null & evictPlc instanceof IgfsPerBlockLruEvictionPolicy)
this.evictPlc = (IgfsPerBlockLruEvictionPolicy)evictPlc;
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheNearConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheNearConfiguration.java b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheNearConfiguration.java
index ae55f83..a4cd3d9 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheNearConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/visor/cache/VisorCacheNearConfiguration.java
@@ -20,6 +20,7 @@ package org.apache.ignite.internal.visor.cache;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
+import org.apache.ignite.cache.eviction.EvictionPolicy;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.processors.cache.GridCacheUtils;
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheConfigurationConsistencySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheConfigurationConsistencySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheConfigurationConsistencySelfTest.java
index 2865627..3f4efc2 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheConfigurationConsistencySelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheConfigurationConsistencySelfTest.java
@@ -29,8 +29,10 @@ import org.apache.ignite.cache.affinity.AffinityFunction;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.eviction.EvictionFilter;
import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicy;
+import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicyFactory;
import org.apache.ignite.cache.eviction.lru.LruEvictionPolicy;
import org.apache.ignite.cache.eviction.sorted.SortedEvictionPolicy;
+import org.apache.ignite.cache.eviction.sorted.SortedEvictionPolicyFactory;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DeploymentMode;
@@ -377,6 +379,28 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
/**
* @throws Exception If failed.
*/
+ public void testDifferentEvictionPolicyEnabled() throws Exception {
+ checkSecondGridStartFails(
+ new C1<CacheConfiguration, Void>() {
+ /** {@inheritDoc} */
+ @Override public Void apply(CacheConfiguration cfg) {
+ cfg.setEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
+ cfg.setOnheapCacheEnabled(true);
+ return null;
+ }
+ },
+ new C1<CacheConfiguration, Void>() {
+ /** {@inheritDoc} */
+ @Override public Void apply(CacheConfiguration cfg) {
+ return null;
+ }
+ }
+ );
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
public void testDifferentEvictionPolicies() throws Exception {
checkSecondGridStartFails(
new C1<CacheConfiguration, Void>() {
@@ -401,6 +425,30 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
/**
* @throws Exception If failed.
*/
+ public void testDifferentEvictionPolicyFactories() throws Exception {
+ checkSecondGridStartFails(
+ new C1<CacheConfiguration, Void>() {
+ /** {@inheritDoc} */
+ @Override public Void apply(CacheConfiguration cfg) {
+ cfg.setEvictionPolicyFactory(new SortedEvictionPolicyFactory());
+ cfg.setOnheapCacheEnabled(true);
+ return null;
+ }
+ },
+ new C1<CacheConfiguration, Void>() {
+ /** {@inheritDoc} */
+ @Override public Void apply(CacheConfiguration cfg) {
+ cfg.setEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
+ cfg.setOnheapCacheEnabled(true);
+ return null;
+ }
+ }
+ );
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
public void testDifferentEvictionFilters() throws Exception {
checkSecondGridStartFails(
new C1<CacheConfiguration, Void>() {
@@ -585,6 +633,7 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
@Override public Void apply(CacheConfiguration cfg) {
NearCacheConfiguration nearCfg = new NearCacheConfiguration();
+ nearCfg.setNearEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
nearCfg.setNearEvictionPolicy(new LruEvictionPolicy());
cfg.setNearConfiguration(nearCfg);
@@ -599,6 +648,7 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
@Override public Void apply(CacheConfiguration cfg) {
NearCacheConfiguration nearCfg = new NearCacheConfiguration();
+ nearCfg.setNearEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
nearCfg.setNearEvictionPolicy(new FifoEvictionPolicy());
cfg.setNearConfiguration(nearCfg);
@@ -624,6 +674,7 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
@Override public Void apply(CacheConfiguration cfg) {
cfg.setAffinity(new TestRendezvousAffinityFunction());
+ cfg.setEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
cfg.setEvictionPolicy(new FifoEvictionPolicy());
cfg.setOnheapCacheEnabled(true);
@@ -643,6 +694,7 @@ public class GridCacheConfigurationConsistencySelfTest extends GridCommonAbstrac
@Override public Void apply(CacheConfiguration cfg) {
cfg.setAffinity(new RendezvousAffinityFunction());
+ cfg.setEvictionPolicyFactory(new FifoEvictionPolicyFactory<>());
cfg.setEvictionPolicy(new LruEvictionPolicy());
cfg.setOnheapCacheEnabled(true);
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearEvictionEventSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearEvictionEventSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearEvictionEventSelfTest.java
index 7088ad7..0d36a5a 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearEvictionEventSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCacheNearEvictionEventSelfTest.java
@@ -36,9 +36,4 @@ public class GridCacheNearEvictionEventSelfTest extends GridCacheEvictionEventAb
@Override protected CacheAtomicityMode atomicityMode() {
return TRANSACTIONAL;
}
-
- /** {@inheritDoc} */
- @Override public void testEvictionEvent() throws Exception {
- super.testEvictionEvent();
- }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/EvictionPolicyFactoryAbstractTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/EvictionPolicyFactoryAbstractTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/EvictionPolicyFactoryAbstractTest.java
new file mode 100644
index 0000000..0aa2d7f
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/EvictionPolicyFactoryAbstractTest.java
@@ -0,0 +1,1073 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.cache.eviction;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.Callable;
+import java.util.concurrent.atomic.AtomicInteger;
+import javax.cache.Cache;
+import javax.cache.configuration.Factory;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.eviction.EvictableEntry;
+import org.apache.ignite.cache.eviction.EvictionFilter;
+import org.apache.ignite.cache.eviction.EvictionPolicy;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.configuration.NearCacheConfiguration;
+import org.apache.ignite.internal.IgniteEx;
+import org.apache.ignite.internal.processors.cache.CacheEvictionManager;
+import org.apache.ignite.internal.processors.cache.GridCacheEvictionManager;
+import org.apache.ignite.internal.processors.cache.distributed.dht.colocated.GridDhtColocatedCache;
+import org.apache.ignite.internal.util.typedef.C2;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.apache.ignite.transactions.Transaction;
+import org.jetbrains.annotations.Nullable;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
+import static org.apache.ignite.cache.CacheMode.LOCAL;
+import static org.apache.ignite.cache.CacheMode.PARTITIONED;
+import static org.apache.ignite.cache.CacheMode.REPLICATED;
+import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_ASYNC;
+import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
+import static org.apache.ignite.events.EventType.EVT_JOB_MAPPED;
+import static org.apache.ignite.events.EventType.EVT_TASK_FAILED;
+import static org.apache.ignite.events.EventType.EVT_TASK_FINISHED;
+import static org.apache.ignite.internal.processors.cache.eviction.EvictionPolicyFactoryAbstractTest.EvictionPolicyProxy.proxy;
+import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
+import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
+
+/**
+ * Base class for eviction tests.
+ */
+public abstract class EvictionPolicyFactoryAbstractTest<T extends EvictionPolicy<?, ?>>
+ extends GridCommonAbstractTest {
+ /** IP finder. */
+ protected static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
+
+ /** Put entry size. */
+ protected static final int PUT_ENTRY_SIZE = 10;
+
+ /** Replicated cache. */
+ protected CacheMode mode = REPLICATED;
+
+ /** Near enabled flag. */
+ protected boolean nearEnabled;
+
+ /** Policy max. */
+ protected int plcMax = 10;
+
+ /** Policy batch size. */
+ protected int plcBatchSize = 1;
+
+ /** Policy max memory size. */
+ protected long plcMaxMemSize = 0;
+
+ protected Factory<T> policyFactory;
+
+ /** Near policy max. */
+ protected int nearMax = 3;
+
+ /** Synchronous commit. */
+ protected boolean syncCommit;
+
+ /** */
+ protected int gridCnt = 2;
+
+ /** */
+ protected EvictionFilter<?, ?> filter;
+
+ /** {@inheritDoc} */
+ @Override protected void afterTest() throws Exception {
+ super.afterTest();
+
+ policyFactory = null;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
+ assert policyFactory != null;
+
+ IgniteConfiguration c = super.getConfiguration(igniteInstanceName);
+
+ CacheConfiguration cc = defaultCacheConfiguration();
+
+ cc.setCacheMode(mode);
+ cc.setOnheapCacheEnabled(true);
+ cc.setEvictionPolicyFactory(policyFactory);
+ cc.setWriteSynchronizationMode(syncCommit ? FULL_SYNC : FULL_ASYNC);
+ cc.setAtomicityMode(TRANSACTIONAL);
+
+ if (nearEnabled) {
+ NearCacheConfiguration nearCfg = new NearCacheConfiguration();
+
+ nearCfg.setNearEvictionPolicyFactory(createNearPolicyFactory(nearMax));
+
+ cc.setNearConfiguration(nearCfg);
+ }
+ else
+ cc.setNearConfiguration(null);
+
+ if (mode == PARTITIONED)
+ cc.setBackups(1);
+
+ if (filter != null)
+ cc.setEvictionFilter(filter);
+
+ c.setCacheConfiguration(cc);
+
+ TcpDiscoverySpi disco = new TcpDiscoverySpi();
+
+ disco.setIpFinder(ipFinder);
+
+ c.setDiscoverySpi(disco);
+
+ c.setIncludeEventTypes(EVT_TASK_FAILED, EVT_TASK_FINISHED, EVT_JOB_MAPPED);
+
+ c.setIncludeProperties();
+
+ return c;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void afterTestsStopped() throws Exception {
+ filter = null;
+
+ super.afterTestsStopped();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizePolicy() throws Exception {
+ plcMax = 3;
+ plcMaxMemSize = 0;
+ plcBatchSize = 1;
+
+ doTestPolicy();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizePolicyWithBatch() throws Exception {
+ plcMax = 3;
+ plcMaxMemSize = 0;
+ plcBatchSize = 2;
+
+ doTestPolicyWithBatch();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizePolicy() throws Exception {
+ plcMax = 0;
+ plcMaxMemSize = 3 * MockEntry.ENTRY_SIZE;
+ plcBatchSize = 1;
+
+ doTestPolicy();
+ }
+
+ /**
+ * Batch ignored when {@code maxSize > 0} and {@code maxMemSize > 0}.
+ *
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizePolicyWithBatch() throws Exception {
+ plcMax = 3;
+ plcMaxMemSize = 10 * MockEntry.ENTRY_SIZE;
+ plcBatchSize = 2;
+
+ doTestPolicy();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeMemory() throws Exception {
+ int max = 10;
+
+ plcMax = max;
+ plcMaxMemSize = 0;
+ plcBatchSize = 1;
+
+ doTestMemory(max);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeMemoryWithBatch() throws Exception {
+ int max = 10;
+
+ plcMax = max;
+ plcMaxMemSize = 0;
+ plcBatchSize = 2;
+
+ doTestMemory(max);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizeMemory() throws Exception {
+ int max = 10;
+
+ plcMax = 0;
+ plcMaxMemSize = max * MockEntry.ENTRY_SIZE;
+ plcBatchSize = 1;
+
+ doTestMemory(max);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeRandom() throws Exception {
+ plcMax = 10;
+ plcMaxMemSize = 0;
+ plcBatchSize = 1;
+
+ doTestRandom();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeRandomWithBatch() throws Exception {
+ plcMax = 10;
+ plcMaxMemSize = 0;
+ plcBatchSize = 2;
+
+ doTestRandom();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizeRandom() throws Exception {
+ plcMax = 0;
+ plcMaxMemSize = 10 * MockEntry.KEY_SIZE;
+ plcBatchSize = 1;
+
+ doTestRandom();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeAllowEmptyEntries() throws Exception {
+ plcMax = 10;
+ plcMaxMemSize = 0;
+ plcBatchSize = 1;
+
+ doTestAllowEmptyEntries();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizeAllowEmptyEntriesWithBatch() throws Exception {
+ plcMax = 10;
+ plcMaxMemSize = 0;
+ plcBatchSize = 2;
+
+ doTestAllowEmptyEntries();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizeAllowEmptyEntries() throws Exception {
+ plcMax = 0;
+ plcMaxMemSize = 10 * MockEntry.KEY_SIZE;
+ plcBatchSize = 1;
+
+ doTestAllowEmptyEntries();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizePut() throws Exception {
+ plcMax = 100;
+ plcBatchSize = 1;
+ plcMaxMemSize = 0;
+
+ doTestPut(plcMax);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxSizePutWithBatch() throws Exception {
+ plcMax = 100;
+ plcBatchSize = 2;
+ plcMaxMemSize = 0;
+
+ doTestPut(plcMax);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testMaxMemSizePut() throws Exception {
+ int max = 100;
+
+ plcMax = 0;
+ plcBatchSize = 2;
+ plcMaxMemSize = max * PUT_ENTRY_SIZE;
+
+ doTestPut(max);
+ }
+
+ /**
+ * Tests policy behaviour.
+ *
+ * @throws Exception If failed.
+ */
+ protected abstract void doTestPolicy() throws Exception;
+
+ /**
+ * Tests policy behaviour with batch enabled.
+ *
+ * @throws Exception If failed.
+ */
+ protected abstract void doTestPolicyWithBatch() throws Exception;
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void doTestAllowEmptyEntries() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1");
+ MockEntry e2 = new MockEntry("2");
+ MockEntry e3 = new MockEntry("3");
+ MockEntry e4 = new MockEntry("4");
+ MockEntry e5 = new MockEntry("5");
+
+ EvictionPolicyProxy p = proxy(policy());
+
+ p.onEntryAccessed(false, e1);
+
+ assertFalse(e1.isEvicted());
+
+ check(p.queue().size(), MockEntry.KEY_SIZE);
+
+ p.onEntryAccessed(false, e2);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+
+ check(p.queue().size(), MockEntry.KEY_SIZE);
+
+ p.onEntryAccessed(false, e3);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+
+ check(p.queue().size(), MockEntry.KEY_SIZE);
+
+ p.onEntryAccessed(false, e4);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ check(p.queue().size(), MockEntry.KEY_SIZE);
+
+ p.onEntryAccessed(false, e5);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ check(p.queue().size(), MockEntry.KEY_SIZE);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void doTestMemory(int max) throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ EvictionPolicyProxy p = proxy(policy());
+
+ int cnt = max + plcBatchSize;
+
+ for (int i = 0; i < cnt; i++)
+ p.onEntryAccessed(false, new MockEntry(Integer.toString(i), Integer.toString(i)));
+
+ info(p);
+
+ check(max, MockEntry.ENTRY_SIZE);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void doTestRandom() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ EvictionPolicyProxy p = proxy(policy());
+
+ int max = 10;
+
+ Random rand = new Random();
+
+ int keys = 31;
+
+ MockEntry[] entries = new MockEntry[keys];
+
+ for (int i = 0; i < entries.length; i++)
+ entries[i] = new MockEntry(Integer.toString(i));
+
+ int runs = 5000000;
+
+ for (int i = 0; i < runs; i++) {
+ boolean rmv = rand.nextBoolean();
+
+ int j = rand.nextInt(entries.length);
+
+ MockEntry e = entry(entries, j);
+
+ if (rmv)
+ entries[j] = new MockEntry(Integer.toString(j));
+
+ p.onEntryAccessed(rmv, e);
+ }
+
+ info(p);
+
+ assertTrue(p.getCurrentSize() <= (plcMaxMemSize > 0 ? max : max + plcBatchSize));
+ assertTrue(p.getCurrentMemorySize() <= (plcMaxMemSize > 0 ? max : max + plcBatchSize) * MockEntry.KEY_SIZE);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void doTestPut(int max) throws Exception {
+ mode = LOCAL;
+ syncCommit = true;
+
+ policyFactory = createPolicyFactory();
+
+ try {
+ Ignite ignite = startGrid();
+
+ IgniteCache<Object, Object> cache = ignite.cache(DEFAULT_CACHE_NAME);
+
+ int cnt = 500;
+
+ int min = Integer.MAX_VALUE;
+
+ int minIdx = 0;
+
+ for (int i = 0; i < cnt; i++) {
+ cache.put(i, i);
+
+ int cacheSize = cache.size();
+
+ if (i > max && cacheSize < min) {
+ min = cacheSize;
+ minIdx = i;
+ }
+ }
+
+ assertTrue("Min cache size is too small: " + min, min >= max);
+
+ check(max, PUT_ENTRY_SIZE);
+
+ info("Min cache size [min=" + min + ", idx=" + minIdx + ']');
+ info("Current cache size " + cache.size());
+ info("Current cache key size " + cache.size());
+
+ min = Integer.MAX_VALUE;
+
+ minIdx = 0;
+
+ // Touch.
+ for (int i = cnt; --i > cnt - max;) {
+ cache.get(i);
+
+ int cacheSize = cache.size();
+
+ if (cacheSize < min) {
+ min = cacheSize;
+ minIdx = i;
+ }
+ }
+
+ info("----");
+ info("Min cache size [min=" + min + ", idx=" + minIdx + ']');
+ info("Current cache size " + cache.size());
+ info("Current cache key size " + cache.size());
+
+ check(max, PUT_ENTRY_SIZE);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @param arr Array.
+ * @param idx Index.
+ * @return Entry at the index.
+ */
+ protected MockEntry entry(MockEntry[] arr, int idx) {
+ MockEntry e = arr[idx];
+
+ if (e.isEvicted())
+ e = arr[idx] = new MockEntry(e.getKey());
+
+ return e;
+ }
+
+ /**
+ * @param prefix Prefix.
+ * @param p Policy.
+ */
+ protected void info(String prefix, EvictionPolicy<?, ?> p) {
+ info(prefix + ": " + p.toString());
+ }
+
+ /** @param p Policy. */
+ protected void info(EvictionPolicy<?, ?> p) {
+ info(p.toString());
+ }
+
+ /**
+ * @param c1 Policy collection.
+ * @param c2 Expected list.
+ */
+ protected static void check(Collection<EvictableEntry<String, String>> c1, MockEntry... c2) {
+ check(c1, F.asList(c2));
+ }
+
+ /**
+ * @param expSize Expected size.
+ * @param entrySize Entry size.
+ */
+ protected void check(int expSize, int entrySize) {
+ EvictionPolicyProxy proxy = proxy(policy());
+
+ assertEquals(expSize, proxy.getCurrentSize());
+ assertEquals(expSize * entrySize, proxy.getCurrentMemorySize());
+ }
+
+ /**
+ * @param entrySize Entry size.
+ * @param c1 Closure 1.
+ * @param c2 Closure 2.
+ */
+ protected void check(int entrySize, Collection<EvictableEntry<String, String>> c1, MockEntry... c2) {
+ check(c2.length, entrySize);
+
+ check(c1, c2);
+ }
+
+ /** @return Policy. */
+ protected T policy() {
+ CacheEvictionManager evictMgr = grid().cachex(DEFAULT_CACHE_NAME).context().evicts();
+
+ assert evictMgr instanceof GridCacheEvictionManager : evictMgr;
+
+ return (T)((GridCacheEvictionManager)evictMgr).getEvictionPolicy();
+ }
+
+ /**
+ * @param i Grid index.
+ * @return Policy.
+ */
+ @SuppressWarnings({"unchecked"})
+ protected T policy(int i) {
+ CacheEvictionManager evictMgr = grid(i).cachex(DEFAULT_CACHE_NAME).context().evicts();
+
+ assert evictMgr instanceof GridCacheEvictionManager : evictMgr;
+
+ return (T)((GridCacheEvictionManager)evictMgr).getEvictionPolicy();
+ }
+
+ /**
+ * @param i Grid index.
+ * @return Policy.
+ */
+ @SuppressWarnings({"unchecked"})
+ protected T nearPolicy(int i) {
+ CacheEvictionManager evictMgr = grid(i).cachex(DEFAULT_CACHE_NAME).context().near().context().evicts();
+
+ assert evictMgr instanceof GridCacheEvictionManager : evictMgr;
+
+ return (T)((GridCacheEvictionManager)evictMgr).getEvictionPolicy();
+ }
+ /**
+ * @param c1 Policy collection.
+ * @param c2 Expected list.
+ */
+ protected static void check(Collection<EvictableEntry<String, String>> c1, List<MockEntry> c2) {
+ assert c1.size() == c2.size() : "Mismatch [actual=" + string(c1) + ", expected=" + string(c2) + ']';
+
+ assert c1.containsAll(c2) : "Mismatch [actual=" + string(c1) + ", expected=" + string(c2) + ']';
+
+ int i = 0;
+
+ // Check order.
+ for (Cache.Entry<String, String> e : c1)
+ assertEquals(e, c2.get(i++));
+ }
+
+ /**
+ * @param c Collection.
+ * @return String.
+ */
+ @SuppressWarnings("unchecked")
+ protected static String string(Iterable<? extends Cache.Entry> c) {
+ return "[" +
+ F.fold(
+ c,
+ "",
+ new C2<Cache.Entry, String, String>() {
+ @Override public String apply(Cache.Entry e, String b) {
+ return b.isEmpty() ? e.getKey().toString() : b + ", " + e.getKey();
+ }
+ }) +
+ "]]";
+ }
+
+ /** @throws Exception If failed. */
+ public void testMaxSizePartitionedNearDisabled() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = false;
+ plcMax = 10;
+ syncCommit = true;
+
+ gridCnt = 2;
+
+ checkPartitioned();
+ }
+
+ /** @throws Exception If failed. */
+ public void testMaxSizePartitionedNearDisabledWithBatch() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = false;
+ plcMax = 10;
+ plcBatchSize = 2;
+ syncCommit = true;
+
+ gridCnt = 2;
+
+ checkPartitioned();
+ }
+
+ /** @throws Exception If failed. */
+ public void testMaxMemSizePartitionedNearDisabled() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = false;
+ plcMax = 0;
+ plcMaxMemSize = 100;
+ syncCommit = true;
+
+ gridCnt = 2;
+
+ checkPartitioned();
+ }
+
+ /** @throws Exception If failed. */
+ public void testPartitionedNearEnabled() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = true;
+ nearMax = 3;
+ plcMax = 10;
+ syncCommit = true;
+
+ gridCnt = 2;
+
+ checkPartitioned(); // Near size is 0 because of backups present.
+ }
+
+ /** @throws Exception If failed. */
+ public void testPartitionedNearDisabledMultiThreaded() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = false;
+ plcMax = 100;
+
+ gridCnt = 2;
+
+ checkPartitionedMultiThreaded();
+ }
+
+ /** @throws Exception If failed. */
+ public void testPartitionedNearEnabledMultiThreaded() throws Exception {
+ mode = PARTITIONED;
+ nearEnabled = true;
+ plcMax = 10;
+
+ gridCnt = 2;
+
+ checkPartitionedMultiThreaded();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void checkPartitioned() throws Exception {
+ int endSize = nearEnabled ? 0 : plcMax;
+
+ int endPlcSize = nearEnabled ? 0 : plcMax;
+
+ policyFactory = createPolicyFactory();
+
+ startGridsMultiThreaded(gridCnt);
+
+ try {
+ Random rand = new Random();
+
+ int cnt = 500;
+
+ for (int i = 0; i < cnt; i++) {
+ IgniteCache<Integer, String> cache = grid(rand.nextInt(2)).cache(DEFAULT_CACHE_NAME);
+
+ int key = rand.nextInt(100);
+ String val = Integer.toString(key);
+
+ cache.put(key, val);
+
+ if (i % 100 == 0)
+ info("Stored cache object for key [key=" + key + ", idx=" + i + ']');
+ }
+
+ if (nearEnabled) {
+ for (int i = 0; i < gridCnt; i++)
+ assertEquals(endSize, near(i).nearSize());
+
+ if (endPlcSize >= 0)
+ checkNearPolicies(endPlcSize);
+ }
+ else {
+ if (plcMaxMemSize > 0) {
+ for (int i = 0; i < gridCnt; i++) {
+ GridDhtColocatedCache<Object, Object> cache = colocated(i);
+
+ int memSize = 0;
+
+ for (Cache.Entry<Object, Object> entry : cache.entrySet())
+ memSize += entry.unwrap(EvictableEntry.class).size();
+
+ EvictionPolicyProxy plc = proxy(policy(i));
+
+ assertTrue(plc.getCurrentMemorySize() <= memSize);
+ }
+ }
+
+ if (plcMax > 0) {
+ for (int i = 0; i < gridCnt; i++) {
+ int actual = colocated(i).map().internalSize();
+
+ assertTrue("Cache size is greater then policy size [expected=" + endSize + ", actual=" + actual + ']',
+ actual <= endSize + (plcMaxMemSize > 0 ? 1 : plcBatchSize));
+ }
+ }
+
+ checkPolicies();
+ }
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ protected void checkPartitionedMultiThreaded() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGridsMultiThreaded(gridCnt);
+
+ final Random rand = new Random();
+
+ final AtomicInteger cntr = new AtomicInteger();
+
+ multithreaded(new Callable() {
+ @Nullable @Override public Object call() throws Exception {
+ int cnt = 100;
+
+ for (int i = 0; i < cnt && !Thread.currentThread().isInterrupted(); i++) {
+ IgniteEx grid = grid(rand.nextInt(2));
+
+ IgniteCache<Integer, String> cache = grid.cache(DEFAULT_CACHE_NAME);
+
+ int key = rand.nextInt(1000);
+ String val = Integer.toString(key);
+
+ try (Transaction tx = grid.transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
+ String v = cache.get(key);
+
+ assert v == null || v.equals(Integer.toString(key)) : "Invalid value for key [key=" + key +
+ ", val=" + v + ']';
+
+ cache.put(key, val);
+
+ tx.commit();
+ }
+
+ if (cntr.incrementAndGet() % 100 == 0)
+ info("Stored cache object for key [key=" + key + ", idx=" + i + ']');
+ }
+
+ return null;
+ }
+ }, 10);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /**
+ * @return Policy.
+ *
+ * @deprecated replace with getPolicyFactory();
+ */
+ @Deprecated
+ protected T createPolicy() {
+ return null;
+ };
+
+ /**
+ * @return Policy.
+ */
+ protected abstract Factory<T> createPolicyFactory();
+
+ /**
+ * @param nearMax Near max.
+ * @return Policy.
+ */
+ protected abstract Factory<T> createNearPolicyFactory(int nearMax);
+
+ /**
+ * Performs after-test near policy check.
+ *
+ * @param nearMax Near max.
+ */
+ protected void checkNearPolicies(int nearMax) {
+ for (int i = 0; i < gridCnt; i++) {
+
+ EvictionPolicyProxy proxy = proxy(nearPolicy(i));
+
+ for (EvictableEntry e : proxy.queue())
+ assert !e.isCached() : "Invalid near policy size: " + proxy.queue();
+ }
+ }
+
+ /**
+ * Performs after-test policy check.
+ */
+ protected void checkPolicies() {
+ for (int i = 0; i < gridCnt; i++) {
+ if (plcMaxMemSize > 0) {
+ int size = 0;
+
+ for (EvictableEntry entry : proxy(policy(i)).queue())
+ size += entry.size();
+
+ assertEquals(size, proxy(policy(i)).getCurrentMemorySize());
+ }
+ else
+ assertTrue(proxy(policy(i)).queue().size() <= plcMax + plcBatchSize);
+ }
+ }
+
+ /**
+ *
+ */
+ @SuppressWarnings({"PublicConstructorInNonPublicClass"})
+ protected static class MockEntry extends GridCacheMockEntry<String, String> {
+ /** Key size. */
+ public static final int KEY_SIZE = 1;
+
+ /** Value size. */
+ public static final int VALUE_SIZE = 1;
+
+ /** Entry size. */
+ public static final int ENTRY_SIZE = KEY_SIZE + VALUE_SIZE;
+
+ /** */
+ private IgniteCache<String, String> parent;
+
+ /** Entry value. */
+ private String val;
+
+ /** @param key Key. */
+ public MockEntry(String key) {
+ super(key);
+ }
+
+ /**
+ * @param key Key.
+ * @param val Value.
+ */
+ public MockEntry(String key, String val) {
+ super(key);
+
+ this.val = val;
+ }
+
+ /**
+ * @param key Key.
+ * @param parent Parent.
+ */
+ public MockEntry(String key, @Nullable IgniteCache<String, String> parent) {
+ super(key);
+
+ this.parent = parent;
+ }
+
+ /** {@inheritDoc} */
+ @SuppressWarnings("unchecked")
+ @Override public <T> T unwrap(Class<T> clazz) {
+ if (clazz.isAssignableFrom(IgniteCache.class))
+ return (T)parent;
+
+ return super.unwrap(clazz);
+ }
+
+ /** {@inheritDoc} */
+ @Override public String getValue() throws IllegalStateException {
+ return val;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int size() {
+ return val == null ? KEY_SIZE : ENTRY_SIZE;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return S.toString(MockEntry.class, this, super.toString());
+ }
+ }
+
+ /**
+ * Eviction policy proxy.
+ */
+ public static class EvictionPolicyProxy implements EvictionPolicy {
+ /** Policy. */
+ private final EvictionPolicy plc;
+
+ /**
+ * @param plc Policy.
+ */
+ private EvictionPolicyProxy(EvictionPolicy plc) {
+ this.plc = plc;
+ }
+
+ /**
+ * @param plc Policy.
+ * @return Policy proxy.
+ */
+ public static EvictionPolicyProxy proxy(EvictionPolicy plc) {
+ return new EvictionPolicyProxy(plc);
+ }
+
+ /**
+ * @return Current size.
+ */
+ int getCurrentSize() {
+ try {
+ return (Integer)plc.getClass().getDeclaredMethod("getCurrentSize").invoke(plc);
+ }
+ catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * @return Current memory size.
+ */
+ long getCurrentMemorySize() {
+ try {
+ return (Long)plc.getClass().getMethod("getCurrentMemorySize").invoke(plc);
+ }
+ catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * @return Current queue.
+ */
+ public Collection<EvictableEntry> queue() {
+ try {
+ return (Collection<EvictableEntry>)plc.getClass().getDeclaredMethod("queue").invoke(plc);
+ }
+ catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * @param rmv Remove.
+ * @param entry Entry.
+ */
+ @Override public void onEntryAccessed(boolean rmv, EvictableEntry entry) {
+ try {
+ plc.getClass()
+ .getMethod("onEntryAccessed", boolean.class, EvictableEntry.class)
+ .invoke(plc, rmv, entry);
+ }
+ catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/6579e69f/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/fifo/FifoEvictionPolicyFactorySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/fifo/FifoEvictionPolicyFactorySelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/fifo/FifoEvictionPolicyFactorySelfTest.java
new file mode 100644
index 0000000..472bf41
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/eviction/fifo/FifoEvictionPolicyFactorySelfTest.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.cache.eviction.fifo;
+
+import javax.cache.configuration.Factory;
+import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicy;
+import org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicyFactory;
+import org.apache.ignite.internal.processors.cache.eviction.EvictionPolicyFactoryAbstractTest;
+
+/**
+ * FIFO eviction policy tests.
+ */
+public class FifoEvictionPolicyFactorySelfTest extends EvictionPolicyFactoryAbstractTest<FifoEvictionPolicy<String, String>> {
+ /** {@inheritDoc} */
+ @Override protected Factory<FifoEvictionPolicy<String, String>> createPolicyFactory() {
+ return new FifoEvictionPolicyFactory<>(plcMax, plcBatchSize, plcMaxMemSize);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected Factory<FifoEvictionPolicy<String, String>> createNearPolicyFactory(int nearMax) {
+ FifoEvictionPolicyFactory<String, String> plc = new FifoEvictionPolicyFactory<>();
+
+ plc.setMaxSize(nearMax);
+ plc.setBatchSize(plcBatchSize);
+
+ return plc;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicy() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ FifoEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ assert !e1.isEvicted();
+ assert !e2.isEvicted();
+ assert !e3.isEvicted();
+
+ p.onEntryAccessed(false, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e2, e3, e4);
+
+ assert e1.isEvicted();
+ assert !e2.isEvicted();
+ assert !e3.isEvicted();
+ assert !e4.isEvicted();
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assert e2.isEvicted();
+ assert !e3.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assert e3.isEvicted();
+ assert !e1.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assert !e1.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assert !e1.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5, e1);
+
+ assert !e1.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(true, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e4, e5);
+
+ assert !e1.isEvicted();
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e5);
+
+ assert !e4.isEvicted();
+ assert !e5.isEvicted();
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ assert !e5.isEvicted();
+
+ info(p);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void doTestPolicyWithBatch() throws Exception {
+ policyFactory = createPolicyFactory();
+
+ try {
+ startGrid();
+
+ MockEntry e1 = new MockEntry("1", "1");
+ MockEntry e2 = new MockEntry("2", "2");
+ MockEntry e3 = new MockEntry("3", "3");
+ MockEntry e4 = new MockEntry("4", "4");
+ MockEntry e5 = new MockEntry("5", "5");
+
+ FifoEvictionPolicy<String, String> p = policy();
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1);
+
+ p.onEntryAccessed(false, e2);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2);
+
+ p.onEntryAccessed(false, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3);
+
+ p.onEntryAccessed(false, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e1, e2, e3, e4);
+
+ assertFalse(e1.isEvicted());
+ assertFalse(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ // Batch evicted.
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertTrue(e1.isEvicted());
+ assertTrue(e2.isEvicted());
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(false, e1 = new MockEntry("1", "1"));
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5, e1);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+ assertFalse(e1.isEvicted());
+
+ p.onEntryAccessed(false, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5, e1);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+ assertFalse(e1.isEvicted());
+
+ p.onEntryAccessed(false, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5, e1);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+ assertFalse(e1.isEvicted());
+
+ p.onEntryAccessed(true, e1);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e4, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e4.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e4);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3, e5);
+
+ assertFalse(e3.isEvicted());
+ assertFalse(e5.isEvicted());
+
+ p.onEntryAccessed(true, e5);
+
+ check(MockEntry.ENTRY_SIZE, p.queue(), e3);
+
+ assertFalse(e3.isEvicted());
+
+ p.onEntryAccessed(true, e3);
+
+ check(MockEntry.ENTRY_SIZE, p.queue());
+
+ assertFalse(e3.isEvicted());
+
+ info(p);
+ }
+ finally {
+ stopAllGrids();
+ }
+ }
+}
\ No newline at end of file
[19/28] ignite git commit: IGNITE-5218: First version of decision
trees. This closes #2936
Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousRegionInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousRegionInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousRegionInfo.java
new file mode 100644
index 0000000..e98bb72
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousRegionInfo.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+/**
+ * Information about region used by continuous features.
+ */
+public class ContinuousRegionInfo extends RegionInfo {
+ /**
+ * Count of samples in this region.
+ */
+ private int size;
+
+ /**
+ * @param impurity Impurity of the region.
+ * @param size Size of this region
+ */
+ public ContinuousRegionInfo(double impurity, int size) {
+ super(impurity);
+ this.size = size;
+ }
+
+ /**
+ * No-op constructor for serialization/deserialization.
+ */
+ public ContinuousRegionInfo() {
+ // No-op
+ }
+
+ /**
+ * Get the size of region.
+ */
+ public int getSize() {
+ return size;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "ContinuousRegionInfo [" +
+ "size=" + size +
+ ']';
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ super.writeExternal(out);
+ out.writeInt(size);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ super.readExternal(in);
+ size = in.readInt();
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousSplitCalculator.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousSplitCalculator.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousSplitCalculator.java
new file mode 100644
index 0000000..f9b81d0
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/ContinuousSplitCalculator.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.stream.DoubleStream;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+
/**
 * Strategy for calculating the best split by a continuous feature.
 *
 * @param <C> Class in which information about region will be stored.
 */
public interface ContinuousSplitCalculator<C extends ContinuousRegionInfo> {
    /**
     * Calculate region info 'from scratch' (i.e. without reusing results from a previous split).
     *
     * @param s Stream of labels in this region.
     * @param l Index of sample projection on this feature in array sorted by this projection value and intervals
     * bitsets. ({@see org.apache.ignite.ml.trees.trainers.columnbased.vectors.ContinuousFeatureProcessor}).
     * @return Region info.
     */
    C calculateRegionInfo(DoubleStream s, int l);

    /**
     * Calculate split info of best split of region given information about this region.
     *
     * @param sampleIndexes Indexes of samples of this region.
     * @param values All values of this feature.
     * @param labels All labels of this feature.
     * @param regionIdx Index of region being split.
     * @param data Information about region being split which can be used for computations.
     * @return Information about best split of region with index given by regionIdx.
     */
    SplitInfo<C> splitRegion(Integer[] sampleIndexes, double[] values, double[] labels, int regionIdx, C data);
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/RegionInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/RegionInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/RegionInfo.java
new file mode 100644
index 0000000..8ec7db3
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/RegionInfo.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
/** Class containing information about region. */
public class RegionInfo implements Externalizable {
    /** Impurity in this region. */
    private double impurity;

    /**
     * Construct region info with the given impurity.
     *
     * @param impurity Impurity of this region.
     */
    public RegionInfo(double impurity) {
        this.impurity = impurity;
    }

    /** No-op constructor required by {@link Externalizable}. */
    public RegionInfo() {
        // No-op
    }

    /**
     * Get impurity in this region.
     *
     * @return Impurity of this region.
     */
    public double impurity() {
        return impurity;
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeDouble(impurity);
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        impurity = in.readDouble();
    }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/DecisionTreeModel.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/DecisionTreeModel.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/DecisionTreeModel.java
new file mode 100644
index 0000000..86e9326
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/DecisionTreeModel.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.models;
+
+import org.apache.ignite.ml.Model;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.trees.nodes.DecisionTreeNode;
+
+/**
+ * Model for decision tree.
+ */
+public class DecisionTreeModel implements Model<Vector, Double> {
+ /** Root node of the decision tree. */
+ private final DecisionTreeNode root;
+
+ /**
+ * Construct decision tree model.
+ *
+ * @param root Root of decision tree.
+ */
+ public DecisionTreeModel(DecisionTreeNode root) {
+ this.root = root;
+ }
+
+ /** {@inheritDoc} */
+ @Override public Double predict(Vector val) {
+ return root.process(val);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/package-info.java
new file mode 100644
index 0000000..ce8418e
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/models/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * <!-- Package description. -->
 * Contains models produced by decision tree trainers.
 */
package org.apache.ignite.ml.trees.models;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/CategoricalSplitNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/CategoricalSplitNode.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/CategoricalSplitNode.java
new file mode 100644
index 0000000..cae6d4a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/CategoricalSplitNode.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.nodes;
+
+import java.util.BitSet;
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Split node by categorical feature.
+ */
+public class CategoricalSplitNode extends SplitNode {
+ /** Bitset specifying which categories belong to left subregion. */
+ private final BitSet bs;
+
+ /**
+ * Construct categorical split node.
+ *
+ * @param featureIdx Index of feature by which split is done.
+ * @param bs Bitset specifying which categories go to the left subtree.
+ */
+ public CategoricalSplitNode(int featureIdx, BitSet bs) {
+ super(featureIdx);
+ this.bs = bs;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean goLeft(Vector v) {
+ return bs.get((int)v.getX(featureIdx));
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "CategoricalSplitNode [bs=" + bs + ']';
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/ContinuousSplitNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/ContinuousSplitNode.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/ContinuousSplitNode.java
new file mode 100644
index 0000000..285cfcd
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/ContinuousSplitNode.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.nodes;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Split node representing split of continuous feature.
+ */
+public class ContinuousSplitNode extends SplitNode {
+ /** Threshold. Values which are less or equal then threshold are assigned to the left subregion. */
+ private final double threshold;
+
+ /**
+ * Construct ContinuousSplitNode by threshold and feature index.
+ *
+ * @param threshold Threshold.
+ * @param featureIdx Feature index.
+ */
+ public ContinuousSplitNode(double threshold, int featureIdx) {
+ super(featureIdx);
+ this.threshold = threshold;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean goLeft(Vector v) {
+ return v.getX(featureIdx) <= threshold;
+ }
+
+ /** Threshold. Values which are less or equal then threshold are assigned to the left subregion. */
+ public double threshold() {
+ return threshold;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "ContinuousSplitNode [" +
+ "threshold=" + threshold +
+ ']';
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/DecisionTreeNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/DecisionTreeNode.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/DecisionTreeNode.java
new file mode 100644
index 0000000..d31623d
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/DecisionTreeNode.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.nodes;
+
+import org.apache.ignite.ml.math.Vector;
+
/**
 * Node of decision tree.
 */
public interface DecisionTreeNode {
    /**
     * Assign the double value to the given vector.
     *
     * @param v Feature vector to process.
     * @return Value assigned to the given vector.
     */
    double process(Vector v);
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/Leaf.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/Leaf.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/Leaf.java
new file mode 100644
index 0000000..79b441f
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/Leaf.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.nodes;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Terminal node of the decision tree.
+ */
+public class Leaf implements DecisionTreeNode {
+ /**
+ * Value in subregion represented by this node.
+ */
+ private final double val;
+
+ /**
+ * Construct the leaf of decision tree.
+ *
+ * @param val Value in subregion represented by this node.
+ */
+ public Leaf(double val) {
+ this.val = val;
+ }
+
+ /**
+ * Return value in subregion represented by this node.
+ *
+ * @param v Vector.
+ * @return Value in subregion represented by this node.
+ */
+ @Override public double process(Vector v) {
+ return val;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/SplitNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/SplitNode.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/SplitNode.java
new file mode 100644
index 0000000..4c258d1
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/SplitNode.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.nodes;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Node in decision tree representing a split.
+ */
+public abstract class SplitNode implements DecisionTreeNode {
+ /** Left subtree. */
+ protected DecisionTreeNode l;
+
+ /** Right subtree. */
+ protected DecisionTreeNode r;
+
+ /** Feature index. */
+ protected final int featureIdx;
+
+ /**
+ * Constructs SplitNode with a given feature index.
+ *
+ * @param featureIdx Feature index.
+ */
+ public SplitNode(int featureIdx) {
+ this.featureIdx = featureIdx;
+ }
+
+ /**
+ * Indicates if the given vector is in left subtree.
+ *
+ * @param v Vector
+ * @return Status of given vector being left subtree.
+ */
+ abstract boolean goLeft(Vector v);
+
+ /**
+ * Left subtree.
+ *
+ * @return Left subtree.
+ */
+ public DecisionTreeNode left() {
+ return l;
+ }
+
+ /**
+ * Right subtree.
+ *
+ * @return Right subtree.
+ */
+ public DecisionTreeNode right() {
+ return r;
+ }
+
+ /**
+ * Set the left subtree.
+ *
+ * @param n left subtree.
+ */
+ public void setLeft(DecisionTreeNode n) {
+ l = n;
+ }
+
+ /**
+ * Set the right subtree.
+ *
+ * @param n right subtree.
+ */
+ public void setRight(DecisionTreeNode n) {
+ r = n;
+ }
+
+ /**
+ * Delegates processing to subtrees.
+ *
+ * @param v Vector.
+ * @return Value assigned to the given vector.
+ */
+ @Override public double process(Vector v) {
+ if (left() != null && goLeft(v))
+ return left().process(v);
+ else
+ return right().process(v);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/package-info.java
new file mode 100644
index 0000000..d6deb9d
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/nodes/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * <!-- Package description. -->
 * Contains classes representing decision tree nodes (splits and leaves).
 */
package org.apache.ignite.ml.trees.nodes;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/package-info.java
new file mode 100644
index 0000000..b07ba4a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * <!-- Package description. -->
 * Contains decision tree algorithms.
 */
package org.apache.ignite.ml.trees;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndex.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndex.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndex.java
new file mode 100644
index 0000000..0d27c8a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndex.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import org.apache.ignite.cache.affinity.AffinityKeyMapped;
+
+/**
+ * Class representing a simple index in 2d matrix in the form (row, col).
+ */
+public class BiIndex implements Externalizable {
+ /** Row. */
+ private int row;
+
+ /** Column. */
+ @AffinityKeyMapped
+ private int col;
+
+ /**
+ * No-op constructor for serialization/deserialization.
+ */
+ public BiIndex() {
+ // No-op.
+ }
+
+ /**
+ * Construct BiIndex from row and column.
+ *
+ * @param row Row.
+ * @param col Column.
+ */
+ public BiIndex(int row, int col) {
+ this.row = row;
+ this.col = col;
+ }
+
+ /**
+ * Returns row.
+ *
+ * @return Row.
+ */
+ public int row() {
+ return row;
+ }
+
+ /**
+ * Returns column.
+ *
+ * @return Column.
+ */
+ public int col() {
+ return col;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ BiIndex idx = (BiIndex)o;
+
+ if (row != idx.row)
+ return false;
+ return col == idx.col;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ int res = row;
+ res = 31 * res + col;
+ return res;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "BiIndex [" +
+ "row=" + row +
+ ", col=" + col +
+ ']';
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ out.writeInt(row);
+ out.writeInt(col);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ row = in.readInt();
+ col = in.readInt();
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndexedCacheColumnDecisionTreeTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndexedCacheColumnDecisionTreeTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndexedCacheColumnDecisionTreeTrainerInput.java
new file mode 100644
index 0000000..04281fb
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/BiIndexedCacheColumnDecisionTreeTrainerInput.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.util.Map;
+import java.util.stream.DoubleStream;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.lang.IgniteBiTuple;
+
/**
 * Adapter for column decision tree trainer for bi-indexed cache.
 * Keys are (row = sample index, col = feature index); values are the feature/label doubles.
 */
public class BiIndexedCacheColumnDecisionTreeTrainerInput extends CacheColumnDecisionTreeTrainerInput<BiIndex, Double> {
    /**
     * Construct an input for {@link ColumnDecisionTreeTrainer}.
     *
     * @param cache Bi-indexed cache.
     * @param catFeaturesInfo Information about categorical feature in the form (feature index -> number of
     *      categories).
     * @param samplesCnt Count of samples.
     * @param featuresCnt Count of features.
     */
    public BiIndexedCacheColumnDecisionTreeTrainerInput(IgniteCache<BiIndex, Double> cache,
        Map<Integer, Integer> catFeaturesInfo, int samplesCnt, int featuresCnt) {
        super(cache,
            // Labels appear to live in the extra column with index featuresCnt (one past the last
            // feature column) — TODO confirm against the superclass contract.
            () -> IntStream.range(0, samplesCnt).mapToObj(s -> new BiIndex(s, featuresCnt)),
            // One (sample row, value) pair per cache entry.
            e -> Stream.of(new IgniteBiTuple<>(e.getKey().row(), e.getValue())),
            DoubleStream::of,
            // Keys of feature fIdx are all sample rows of that column.
            fIdx -> IntStream.range(0, samplesCnt).mapToObj(s -> new BiIndex(s, fIdx)),
            catFeaturesInfo,
            featuresCnt,
            samplesCnt);
    }

    /** {@inheritDoc} */
    @Override public Object affinityKey(int idx, Ignite ignite) {
        // NOTE(review): returns the raw index as affinity key; the superclass contract is not visible
        // here — verify this agrees with BiIndex collocating entries by its @AffinityKeyMapped 'col'.
        return idx;
    }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/CacheColumnDecisionTreeTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/CacheColumnDecisionTreeTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/CacheColumnDecisionTreeTrainerInput.java
new file mode 100644
index 0000000..9518caf
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/CacheColumnDecisionTreeTrainerInput.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.DoubleStream;
+import java.util.stream.Stream;
+import javax.cache.Cache;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.internal.processors.cache.CacheEntryImpl;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+
+/**
+ * Adapter of a given cache to {@link ColumnDecisionTreeTrainerInput}.
+ *
+ * @param <K> Class of keys of the cache.
+ * @param <V> Class of values of the cache.
+ */
+public abstract class CacheColumnDecisionTreeTrainerInput<K, V> implements ColumnDecisionTreeTrainerInput {
+ /** Supplier of stream of keys under which labels are stored. */
+ private final IgniteSupplier<Stream<K>> labelsKeys;
+
+ /** Count of features. */
+ private final int featuresCnt;
+
+ /** Function which maps feature index to Stream of keys corresponding to this feature index. */
+ private final IgniteFunction<Integer, Stream<K>> keyMapper;
+
+ /** Information about which features are categorical in form of feature index -> number of categories. */
+ private final Map<Integer, Integer> catFeaturesInfo;
+
+ /** Cache name. */
+ private final String cacheName;
+
+ /** Count of samples. */
+ private final int samplesCnt;
+
+ /** Function used for mapping cache values to stream of tuples. */
+ private final IgniteFunction<Cache.Entry<K, V>, Stream<IgniteBiTuple<Integer, Double>>> valuesMapper;
+
+ /**
+ * Function which map value of entry with label key to DoubleStream.
+ * Look at {@code CacheColumnDecisionTreeTrainerInput::labels} for understanding how {@code labelsKeys} and
+ * {@code labelsMapper} interact.
+ */
+ private final IgniteFunction<V, DoubleStream> labelsMapper;
+
+ /**
+ * Constructs input for {@link org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer}.
+ *
+ * @param c Cache.
+ * @param labelsKeys Supplier of stream of keys under which labels are stored.
+ * @param valuesMapper Function for mapping cache entry to stream used by {@link
+ * org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer}.
+ * @param labelsMapper Function used for mapping cache value to labels array.
+ * @param keyMapper Function used for mapping feature index to the cache key.
+ * @param catFeaturesInfo Information about which features are categorical in form of feature index -> number of
+ * categories.
+ * @param featuresCnt Count of features.
+ * @param samplesCnt Count of samples.
+ */
+ // TODO: IGNITE-5724 think about boxing/unboxing
+ public CacheColumnDecisionTreeTrainerInput(IgniteCache<K, V> c,
+ IgniteSupplier<Stream<K>> labelsKeys,
+ IgniteFunction<Cache.Entry<K, V>, Stream<IgniteBiTuple<Integer, Double>>> valuesMapper,
+ IgniteFunction<V, DoubleStream> labelsMapper,
+ IgniteFunction<Integer, Stream<K>> keyMapper,
+ Map<Integer, Integer> catFeaturesInfo,
+ int featuresCnt, int samplesCnt) {
+
+ // Only the cache name is stored; the cache itself is re-resolved on whatever node this input is used.
+ cacheName = c.getName();
+ this.labelsKeys = labelsKeys;
+ this.valuesMapper = valuesMapper;
+ this.labelsMapper = labelsMapper;
+ this.keyMapper = keyMapper;
+ this.catFeaturesInfo = catFeaturesInfo;
+ this.samplesCnt = samplesCnt;
+ this.featuresCnt = featuresCnt;
+ }
+
+ /** {@inheritDoc} */
+ @Override public Stream<IgniteBiTuple<Integer, Double>> values(int idx) {
+ // Bulk-load all entries of the given column and flatten them into (sample index, value) tuples.
+ return cache(Ignition.localIgnite()).getAll(keyMapper.apply(idx).collect(Collectors.toSet())).
+ entrySet().
+ stream().
+ flatMap(ent -> valuesMapper.apply(new CacheEntryImpl<>(ent.getKey(), ent.getValue())));
+ }
+
+ /** {@inheritDoc} */
+ @Override public double[] labels(Ignite ignite) {
+ return labelsKeys.get().map(k -> get(k, ignite)).flatMapToDouble(labelsMapper).toArray();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Map<Integer, Integer> catFeaturesInfo() {
+ return catFeaturesInfo;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int featuresCount() {
+ return featuresCnt;
+ }
+
+ /** {@inheritDoc} */
+ @Override public Object affinityKey(int idx, Ignite ignite) {
+ // NOTE(review): keyMapper.apply(idx) returns a Stream<K>, so the affinity key is computed for the
+ // Stream object itself rather than for a cache key of this column; a representative key of the
+ // column was probably intended — confirm.
+ return ignite.affinity(cacheName).affinityKey(keyMapper.apply(idx));
+ }
+
+ /** Get value for the given key, preferring a cheap local peek before a (possibly remote) get. */
+ private V get(K k, Ignite ignite) {
+ V res = cache(ignite).localPeek(k);
+
+ if (res == null)
+ res = cache(ignite).get(k);
+
+ return res;
+ }
+
+ /** Resolve the adapted cache by name on the given Ignite instance. */
+ private IgniteCache<K, V> cache(Ignite ignite) {
+ return ignite.getOrCreateCache(cacheName);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainer.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainer.java
new file mode 100644
index 0000000..32e33f3
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainer.java
@@ -0,0 +1,557 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.DoubleStream;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import javax.cache.Cache;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CachePeekMode;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.cluster.ClusterNode;
+import org.apache.ignite.internal.processors.cache.CacheEntryImpl;
+import org.apache.ignite.internal.util.typedef.X;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.Trainer;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.distributed.CacheUtils;
+import org.apache.ignite.ml.math.functions.Functions;
+import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteCurriedBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.ContinuousSplitCalculator;
+import org.apache.ignite.ml.trees.models.DecisionTreeModel;
+import org.apache.ignite.ml.trees.nodes.DecisionTreeNode;
+import org.apache.ignite.ml.trees.nodes.Leaf;
+import org.apache.ignite.ml.trees.nodes.SplitNode;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.ContextCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache.FeatureKey;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.ProjectionsCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.ProjectionsCache.RegionKey;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.SplitCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.SplitCache.SplitKey;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.FeatureProcessor;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+import org.jetbrains.annotations.NotNull;
+
+import static org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache.getFeatureCacheKey;
+
+/**
+ * This trainer stores observations as columns and features as rows.
+ * Ideas from https://github.com/fabuzaid21/yggdrasil are used here.
+ */
+public class ColumnDecisionTreeTrainer<D extends ContinuousRegionInfo> implements
+ Trainer<DecisionTreeModel, ColumnDecisionTreeTrainerInput> {
+ /**
+ * Function used to assign a value to a region.
+ */
+ private final IgniteFunction<DoubleStream, Double> regCalc;
+
+ /**
+ * Provider of calculator of splits for region projections on continuous features.
+ */
+ private final IgniteFunction<ColumnDecisionTreeTrainerInput, ? extends ContinuousSplitCalculator<D>> continuousCalculatorProvider;
+
+ /**
+ * Provider of impurity calculator used for region projections on categorical features.
+ **/
+ private final IgniteFunction<ColumnDecisionTreeTrainerInput, IgniteFunction<DoubleStream, Double>> categoricalCalculatorProvider;
+
+ /**
+ * Cache used for storing data for training.
+ */
+ private IgniteCache<RegionKey, List<RegionProjection>> prjsCache;
+
+ /**
+ * Minimal information gain: training stops when no split improves gain by more than this.
+ */
+ private static final double MIN_INFO_GAIN = 1E-10;
+
+ /**
+ * Maximal depth of the decision tree.
+ */
+ private final int maxDepth;
+
+ /**
+ * Size of block which is used for storing regions in cache.
+ */
+ private static final int BLOCK_SIZE = 1 << 4;
+
+ /** Ignite instance. */
+ private final Ignite ignite;
+
+ /**
+ * Construct {@link ColumnDecisionTreeTrainer}.
+ *
+ * @param maxDepth Maximal depth of the decision tree.
+ * @param continuousCalculatorProvider Provider of calculator of splits for region projection on continuous
+ * features.
+ * @param categoricalCalculatorProvider Provider of calculator of splits for region projection on categorical
+ * features.
+ * @param regCalc Function used to assign a value to a region.
+ * @param ignite Ignite instance.
+ */
+ public ColumnDecisionTreeTrainer(int maxDepth,
+ IgniteFunction<ColumnDecisionTreeTrainerInput, ? extends ContinuousSplitCalculator<D>> continuousCalculatorProvider,
+ IgniteFunction<ColumnDecisionTreeTrainerInput, IgniteFunction<DoubleStream, Double>> categoricalCalculatorProvider,
+ IgniteFunction<DoubleStream, Double> regCalc,
+ Ignite ignite) {
+ this.maxDepth = maxDepth;
+ this.continuousCalculatorProvider = continuousCalculatorProvider;
+ this.categoricalCalculatorProvider = categoricalCalculatorProvider;
+ this.regCalc = regCalc;
+ this.ignite = ignite;
+ }
+
+ /**
+ * Utility class used to get index of feature by which split is done and split info.
+ */
+ private static class IndexAndSplitInfo {
+ /**
+ * Index of feature by which split is done.
+ */
+ private final int featureIdx;
+
+ /**
+ * Split information.
+ */
+ private final SplitInfo info;
+
+ /**
+ * @param featureIdx Index of feature by which split is done.
+ * @param info Split information.
+ */
+ IndexAndSplitInfo(int featureIdx, SplitInfo info) {
+ this.featureIdx = featureIdx;
+ this.info = info;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "IndexAndSplitInfo [featureIdx=" + featureIdx + ", info=" + info + ']';
+ }
+ }
+
+ /**
+ * Utility class used to build decision tree. Basically it is pointer to leaf node.
+ */
+ private static class TreeTip {
+ /** Setter which attaches a node at this leaf position. */
+ private Consumer<DecisionTreeNode> leafSetter;
+
+ /** Depth of this tip in the tree. */
+ private int depth;
+
+ /** */
+ TreeTip(Consumer<DecisionTreeNode> leafSetter, int depth) {
+ this.leafSetter = leafSetter;
+ this.depth = depth;
+ }
+ }
+
+ /**
+ * Utility class used as decision tree root node.
+ */
+ private static class RootNode implements DecisionTreeNode {
+ /** Child node to which processing is delegated. */
+ private DecisionTreeNode s;
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override public double process(Vector v) {
+ return s.process(v);
+ }
+
+ /** */
+ void setSplit(DecisionTreeNode s) {
+ this.s = s;
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override public DecisionTreeModel train(ColumnDecisionTreeTrainerInput i) {
+ prjsCache = ProjectionsCache.getOrCreate(ignite);
+ IgniteCache<UUID, TrainingContext<D>> ctxtCache = ContextCache.getOrCreate(ignite);
+ SplitCache.getOrCreate(ignite);
+
+ // Unique id isolating this training's cache entries from concurrent trainings.
+ UUID trainingUUID = UUID.randomUUID();
+
+ TrainingContext<D> ct = new TrainingContext<>(i, continuousCalculatorProvider.apply(i), categoricalCalculatorProvider.apply(i), trainingUUID, ignite);
+ ctxtCache.put(trainingUUID, ct);
+
+ // On each node: load the locally-mapped feature columns into the features cache and create
+ // the initial single-region projection for each of them.
+ CacheUtils.bcast(prjsCache.getName(), ignite, () -> {
+ Ignite ignite = Ignition.localIgnite();
+ IgniteCache<RegionKey, List<RegionProjection>> projCache = ProjectionsCache.getOrCreate(ignite);
+ IgniteCache<FeatureKey, double[]> featuresCache = FeaturesCache.getOrCreate(ignite);
+
+ Affinity<RegionKey> targetAffinity = ignite.affinity(ProjectionsCache.CACHE_NAME);
+
+ ClusterNode locNode = ignite.cluster().localNode();
+
+ Map<FeatureKey, double[]> fm = new ConcurrentHashMap<>();
+ Map<RegionKey, List<RegionProjection>> pm = new ConcurrentHashMap<>();
+
+ targetAffinity.
+ mapKeysToNodes(IntStream.range(0, i.featuresCount()).
+ mapToObj(idx -> ProjectionsCache.key(idx, 0, i.affinityKey(idx, ignite), trainingUUID)).
+ collect(Collectors.toSet())).getOrDefault(locNode, Collections.emptyList()).
+ forEach(k -> {
+ FeatureProcessor vec;
+
+ int featureIdx = k.featureIdx();
+
+ IgniteCache<UUID, TrainingContext<D>> ctxCache = ContextCache.getOrCreate(ignite);
+ TrainingContext ctx = ctxCache.get(trainingUUID);
+ double[] vals = new double[ctx.labels().length];
+
+ vec = ctx.featureProcessor(featureIdx);
+ i.values(featureIdx).forEach(t -> vals[t.get1()] = t.get2());
+
+ fm.put(getFeatureCacheKey(featureIdx, trainingUUID, i.affinityKey(featureIdx, ignite)), vals);
+
+ List<RegionProjection> newReg = new ArrayList<>(BLOCK_SIZE);
+ newReg.add(vec.createInitialRegion(getSamples(i.values(featureIdx), ctx.labels().length), vals, ctx.labels()));
+ pm.put(k, newReg);
+ });
+
+ featuresCache.putAll(fm);
+ projCache.putAll(pm);
+
+ return null;
+ });
+
+ return doTrain(i, trainingUUID);
+ }
+
+ /**
+ * Get samples array.
+ *
+ * @param values Stream of tuples in the form of (index, value).
+ * @param size size of stream.
+ * @return Samples array.
+ */
+ private Integer[] getSamples(Stream<IgniteBiTuple<Integer, Double>> values, int size) {
+ Integer[] res = new Integer[size];
+
+ values.forEach(v -> res[v.get1()] = v.get1());
+
+ return res;
+ }
+
+ /** Main training loop: repeatedly split the region with the globally best gain, then turn tips into leaves. */
+ @NotNull
+ private DecisionTreeModel doTrain(ColumnDecisionTreeTrainerInput input, UUID uuid) {
+ RootNode root = new RootNode();
+
+ // List containing setters of leaves of the tree.
+ List<TreeTip> tips = new LinkedList<>();
+ tips.add(new TreeTip(root::setSplit, 0));
+
+ int curDepth = 0;
+ int regsCnt = 1;
+
+ int featuresCnt = input.featuresCount();
+ IntStream.range(0, featuresCnt).mapToObj(fIdx -> SplitCache.key(fIdx, input.affinityKey(fIdx, ignite), uuid)).
+ forEach(k -> SplitCache.getOrCreate(ignite).put(k, new IgniteBiTuple<>(0, 0.0)))
+ updateSplitCache(0, regsCnt, featuresCnt, ig -> i -> input.affinityKey(i, ig), uuid);
+
+ // TODO: IGNITE-5893 Currently if the best split makes tree deeper than max depth process will be terminated, but actually we should
+ // only stop when *any* improving split makes tree deeper than max depth. Can be fixed if we will store which
+ // regions cannot be split more and split only those that can.
+ while (true) {
+ long before = System.currentTimeMillis();
+
+ IgniteBiTuple<Integer, IgniteBiTuple<Integer, Double>> b = findBestSplitIndexForFeatures(featuresCnt, input::affinityKey, uuid);
+
+ long findBestRegIdx = System.currentTimeMillis() - before;
+
+ Integer bestFeatureIdx = b.get1();
+
+ Integer regIdx = b.get2().get1();
+ Double bestInfoGain = b.get2().get2();
+
+ if (regIdx >= 0 && bestInfoGain > MIN_INFO_GAIN) {
+ before = System.currentTimeMillis();
+
+ // Recompute the winning split on the node owning the corresponding projection block.
+ SplitInfo bi = ignite.compute().affinityCall(ProjectionsCache.CACHE_NAME,
+ input.affinityKey(bestFeatureIdx, ignite),
+ () -> {
+ TrainingContext<ContinuousRegionInfo> ctx = ContextCache.getOrCreate(ignite).get(uuid);
+ Ignite ignite = Ignition.localIgnite();
+ RegionKey key = ProjectionsCache.key(bestFeatureIdx,
+ regIdx / BLOCK_SIZE,
+ input.affinityKey(bestFeatureIdx, Ignition.localIgnite()),
+ uuid);
+ RegionProjection reg = ProjectionsCache.getOrCreate(ignite).localPeek(key).get(regIdx % BLOCK_SIZE);
+ return ctx.featureProcessor(bestFeatureIdx).findBestSplit(reg, ctx.values(bestFeatureIdx, ignite), ctx.labels(), regIdx);
+ });
+
+ long findBestSplit = System.currentTimeMillis() - before;
+
+ IndexAndSplitInfo best = new IndexAndSplitInfo(bestFeatureIdx, bi);
+
+ regsCnt++;
+
+ X.println(">>> Globally best: " + best.info + " idx time: " + findBestRegIdx + ", calculate best: " + findBestSplit + " fi: " + best.featureIdx + ", regs: " + regsCnt);
+ // Request bitset for split region.
+ int ind = best.info.regionIndex();
+
+ SparseBitSet bs = ignite.compute().affinityCall(ProjectionsCache.CACHE_NAME,
+ input.affinityKey(bestFeatureIdx, ignite),
+ () -> {
+ Ignite ignite = Ignition.localIgnite();
+ IgniteCache<FeatureKey, double[]> featuresCache = FeaturesCache.getOrCreate(ignite);
+ IgniteCache<UUID, TrainingContext<D>> ctxCache = ContextCache.getOrCreate(ignite);
+ TrainingContext ctx = ctxCache.localPeek(uuid);
+
+ double[] values = featuresCache.localPeek(getFeatureCacheKey(bestFeatureIdx, uuid, input.affinityKey(bestFeatureIdx, Ignition.localIgnite())));
+ RegionKey key = ProjectionsCache.key(bestFeatureIdx,
+ regIdx / BLOCK_SIZE,
+ input.affinityKey(bestFeatureIdx, Ignition.localIgnite()),
+ uuid);
+ RegionProjection reg = ProjectionsCache.getOrCreate(ignite).localPeek(key).get(regIdx % BLOCK_SIZE);
+ return ctx.featureProcessor(bestFeatureIdx).calculateOwnershipBitSet(reg, values, best.info);
+
+ });
+
+ SplitNode sn = best.info.createSplitNode(best.featureIdx);
+
+ TreeTip tipToSplit = tips.get(ind);
+ tipToSplit.leafSetter.accept(sn);
+ tipToSplit.leafSetter = sn::setLeft;
+ // NOTE(review): post-increment means the new right tip is registered with the pre-split depth
+ // while the split tip is deepened; confirm this asymmetric depth accounting is intended.
+ int d = tipToSplit.depth++;
+ tips.add(new TreeTip(sn::setRight, d));
+
+ if (d > curDepth) {
+ curDepth = d;
+ X.println(">>> Depth: " + curDepth);
+ X.println(">>> Cache size: " + prjsCache.size(CachePeekMode.PRIMARY));
+ }
+
+ before = System.currentTimeMillis();
+ // Perform split on all feature vectors.
+ IgniteSupplier<Set<RegionKey>> bestRegsKeys = () -> IntStream.range(0, featuresCnt).
+ mapToObj(fIdx -> ProjectionsCache.key(fIdx, ind / BLOCK_SIZE, input.affinityKey(fIdx, Ignition.localIgnite()), uuid)).
+ collect(Collectors.toSet());
+
+ int rc = regsCnt;
+
+ // Perform split.
+ CacheUtils.update(prjsCache.getName(), ignite,
+ (Ignite ign, Cache.Entry<RegionKey, List<RegionProjection>> e) -> {
+ RegionKey k = e.getKey();
+
+ List<RegionProjection> leftBlock = e.getValue();
+
+ int fIdx = k.featureIdx();
+ int idxInBlock = ind % BLOCK_SIZE;
+
+ IgniteCache<UUID, TrainingContext<D>> ctxCache = ContextCache.getOrCreate(ign);
+ TrainingContext<D> ctx = ctxCache.get(uuid);
+
+ RegionProjection targetRegProj = leftBlock.get(idxInBlock);
+
+ IgniteBiTuple<RegionProjection, RegionProjection> regs = ctx.
+ performSplit(input, bs, fIdx, best.featureIdx, targetRegProj, best.info.leftData(), best.info.rightData(), ign);
+
+ RegionProjection left = regs.get1();
+ RegionProjection right = regs.get2();
+
+ leftBlock.set(idxInBlock, left);
+ RegionKey rightKey = ProjectionsCache.key(fIdx, (rc - 1) / BLOCK_SIZE, input.affinityKey(fIdx, ign), uuid);
+
+ IgniteCache<RegionKey, List<RegionProjection>> c = ProjectionsCache.getOrCreate(ign);
+
+ List<RegionProjection> rightBlock = rightKey.equals(k) ? leftBlock : c.localPeek(rightKey);
+
+ if (rightBlock == null) {
+ List<RegionProjection> newBlock = new ArrayList<>(BLOCK_SIZE);
+ newBlock.add(right);
+ return Stream.of(new CacheEntryImpl<>(k, leftBlock), new CacheEntryImpl<>(rightKey, newBlock));
+ }
+ else {
+ rightBlock.add(right);
+ // NOTE(review): 'rightBlock.equals(k)' compares a List to a RegionKey and is therefore
+ // always false, so both entries are always emitted; 'rightKey.equals(k)' was probably
+ // intended — confirm.
+ return rightBlock.equals(k) ?
+ Stream.of(new CacheEntryImpl<>(k, leftBlock)) :
+ Stream.of(new CacheEntryImpl<>(k, leftBlock), new CacheEntryImpl<>(rightKey, rightBlock));
+ }
+ },
+ bestRegsKeys);
+
+ X.println(">>> Update of projs cache took " + (System.currentTimeMillis() - before));
+
+ before = System.currentTimeMillis();
+
+ updateSplitCache(ind, rc, featuresCnt, ig -> i -> input.affinityKey(i, ig), uuid);
+
+ X.println(">>> Update of split cache took " + (System.currentTimeMillis() - before));
+ }
+ else {
+ X.println(">>> Best feature index: " + bestFeatureIdx + ", best infoGain " + bestInfoGain);
+ break;
+ }
+ }
+
+ int rc = regsCnt;
+
+ // Compute leaf values from the feature-0 projections of all final regions.
+ IgniteSupplier<Iterable<Cache.Entry<RegionKey, List<RegionProjection>>>> featZeroRegs = () -> {
+ IgniteCache<RegionKey, List<RegionProjection>> projsCache = ProjectionsCache.getOrCreate(Ignition.localIgnite());
+
+ return () -> IntStream.range(0, (rc - 1) / BLOCK_SIZE + 1).
+ mapToObj(rBIdx -> ProjectionsCache.key(0, rBIdx, input.affinityKey(0, Ignition.localIgnite()), uuid)).
+ map(k -> (Cache.Entry<RegionKey, List<RegionProjection>>)new CacheEntryImpl<>(k, projsCache.localPeek(k))).iterator();
+ };
+
+ Map<Integer, Double> vals = CacheUtils.reduce(prjsCache.getName(), ignite,
+ (TrainingContext ctx, Cache.Entry<RegionKey, List<RegionProjection>> e, Map<Integer, Double> m) -> {
+ int regBlockIdx = e.getKey().regionBlockIndex();
+
+ if (e.getValue() != null) {
+ for (int i = 0; i < e.getValue().size(); i++) {
+ int regIdx = regBlockIdx * BLOCK_SIZE + i;
+ RegionProjection reg = e.getValue().get(i);
+
+ Double res = regCalc.apply(Arrays.stream(reg.sampleIndexes()).mapToDouble(s -> ctx.labels()[s]));
+ m.put(regIdx, res);
+ }
+ }
+
+ return m;
+ },
+ () -> ContextCache.getOrCreate(Ignition.localIgnite()).get(uuid),
+ featZeroRegs,
+ (infos, infos2) -> {
+ Map<Integer, Double> res = new HashMap<>();
+ res.putAll(infos);
+ res.putAll(infos2);
+ return res;
+ },
+ HashMap::new
+ );
+
+ int i = 0;
+ for (TreeTip tip : tips) {
+ tip.leafSetter.accept(new Leaf(vals.get(i)));
+ i++;
+ }
+
+ // Clean up all per-training cache entries.
+ ProjectionsCache.clear(featuresCnt, rc, input::affinityKey, uuid, ignite);
+ ContextCache.getOrCreate(ignite).remove(uuid);
+ FeaturesCache.clear(featuresCnt, input::affinityKey, uuid, ignite);
+ SplitCache.clear(featuresCnt, input::affinityKey, uuid, ignite);
+
+ return new DecisionTreeModel(root.s);
+ }
+
+ /**
+ * Find the best split in the form (feature index, (index of region with the best split, impurity of region with the
+ * best split)).
+ *
+ * @param featuresCnt Count of features.
+ * @param affinity Affinity function.
+ * @param trainingUUID UUID of training.
+ * @return Best split in the form (feature index, (index of region with the best split, impurity of region with the
+ * best split)).
+ */
+ private IgniteBiTuple<Integer, IgniteBiTuple<Integer, Double>> findBestSplitIndexForFeatures(int featuresCnt,
+ IgniteBiFunction<Integer, Ignite, Object> affinity,
+ UUID trainingUUID) {
+ Set<Integer> featureIndexes = IntStream.range(0, featuresCnt).boxed().collect(Collectors.toSet());
+
+ return CacheUtils.reduce(SplitCache.CACHE_NAME, ignite,
+ (Object ctx, Cache.Entry<SplitKey, IgniteBiTuple<Integer, Double>> e, IgniteBiTuple<Integer, IgniteBiTuple<Integer, Double>> r) ->
+ Functions.MAX_GENERIC(new IgniteBiTuple<>(e.getKey().featureIdx(), e.getValue()), r, comparator()),
+ () -> null,
+ () -> SplitCache.localEntries(featureIndexes, affinity, trainingUUID),
+ (i1, i2) -> Functions.MAX_GENERIC(i1, i2, Comparator.comparingDouble(bt -> bt.get2().get2())),
+ () -> new IgniteBiTuple<>(-1, new IgniteBiTuple<>(-1, Double.NEGATIVE_INFINITY))
+ );
+ }
+
+ /** Comparator of (feature idx, (region idx, gain)) tuples by gain; nulls compare as negative infinity. */
+ private static Comparator<IgniteBiTuple<Integer, IgniteBiTuple<Integer, Double>>> comparator() {
+ return Comparator.comparingDouble(bt -> bt != null && bt.get2() != null ? bt.get2().get2() : Double.NEGATIVE_INFINITY);
+ }
+
+ /**
+ * Update split cache.
+ *
+ * @param lastSplitRegionIdx Index of region which had last best split.
+ * @param regsCnt Count of regions.
+ * @param featuresCnt Count of features.
+ * @param affinity Affinity function.
+ * @param trainingUUID UUID of current training.
+ */
+ private void updateSplitCache(int lastSplitRegionIdx, int regsCnt, int featuresCnt,
+ IgniteCurriedBiFunction<Ignite, Integer, Object> affinity,
+ UUID trainingUUID) {
+ CacheUtils.update(SplitCache.CACHE_NAME, ignite,
+ (Ignite ign, Cache.Entry<SplitKey, IgniteBiTuple<Integer, Double>> e) -> {
+ Integer bestRegIdx = e.getValue().get1();
+ int fIdx = e.getKey().featureIdx();
+ TrainingContext ctx = ContextCache.getOrCreate(ign).get(trainingUUID);
+
+ Map<Integer, RegionProjection> toCompare;
+
+ // Fully recalculate best.
+ if (bestRegIdx == lastSplitRegionIdx)
+ toCompare = ProjectionsCache.projectionsOfFeature(fIdx, maxDepth, regsCnt, BLOCK_SIZE, affinity.apply(ign), trainingUUID, ign);
+ // Just compare previous best and two regions which are produced by split.
+ else
+ toCompare = ProjectionsCache.projectionsOfRegions(fIdx, maxDepth,
+ IntStream.of(bestRegIdx, lastSplitRegionIdx, regsCnt - 1), BLOCK_SIZE, affinity.apply(ign), trainingUUID, ign);
+
+ double[] values = ctx.values(fIdx, ign);
+ double[] labels = ctx.labels();
+
+ IgniteBiTuple<Integer, Double> max = toCompare.entrySet().stream().
+ map(ent -> {
+ SplitInfo bestSplit = ctx.featureProcessor(fIdx).findBestSplit(ent.getValue(), values, labels, ent.getKey());
+ return new IgniteBiTuple<>(ent.getKey(), bestSplit != null ? bestSplit.infoGain() : Double.NEGATIVE_INFINITY);
+ }).
+ max(Comparator.comparingDouble(IgniteBiTuple::get2)).
+ get();
+
+ return Stream.of(new CacheEntryImpl<>(e.getKey(), max));
+ },
+ () -> IntStream.range(0, featuresCnt).mapToObj(fIdx -> SplitCache.key(fIdx, affinity.apply(ignite).apply(fIdx), trainingUUID)).collect(Collectors.toSet())
+ );
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainerInput.java
new file mode 100644
index 0000000..94331f7
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/ColumnDecisionTreeTrainerInput.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.util.Map;
+import java.util.stream.Stream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.lang.IgniteBiTuple;
+
+/**
+ * Input for {@link ColumnDecisionTreeTrainer}.
+ */
+public interface ColumnDecisionTreeTrainerInput {
+ /**
+ * Projection of data on feature with the given index.
+ *
+ * @param idx Feature index.
+ * @return Projection of data on feature with the given index as a stream of (sample index, value) tuples.
+ */
+ Stream<IgniteBiTuple<Integer, Double>> values(int idx);
+
+ /**
+ * Labels.
+ *
+ * @param ignite Ignite instance.
+ * @return Array of labels, one per sample.
+ */
+ double[] labels(Ignite ignite);
+
+ /** Information about which features are categorical in the form of feature index -> number of categories. */
+ Map<Integer, Integer> catFeaturesInfo();
+
+ /** Number of features. */
+ int featuresCount();
+
+ /**
+ * Get affinity key for the given column index.
+ * Affinity key should be pure-functionally dependent from idx.
+ *
+ * @param idx Column (feature) index.
+ * @param ignite Ignite instance.
+ * @return Affinity key for the given column index.
+ */
+ Object affinityKey(int idx, Ignite ignite);
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/MatrixColumnDecisionTreeTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/MatrixColumnDecisionTreeTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/MatrixColumnDecisionTreeTrainerInput.java
new file mode 100644
index 0000000..9a11902
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/MatrixColumnDecisionTreeTrainerInput.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.DoubleStream;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import javax.cache.Cache;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.distributed.keys.RowColMatrixKey;
+import org.apache.ignite.ml.math.distributed.keys.impl.SparseMatrixKey;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.storage.matrix.SparseDistributedMatrixStorage;
+import org.jetbrains.annotations.NotNull;
+
/**
 * Adapter of {@link SparseDistributedMatrix} to {@link ColumnDecisionTreeTrainerInput}.
 * The matrix should be in {@link org.apache.ignite.ml.math.StorageConstants#COLUMN_STORAGE_MODE} and
 * should contain samples in rows, the last position in each row being the label of that sample.
 */
public class MatrixColumnDecisionTreeTrainerInput extends CacheColumnDecisionTreeTrainerInput<RowColMatrixKey, Map<Integer, Double>> {
    /**
     * @param m Sparse matrix in {@link org.apache.ignite.ml.math.StorageConstants#COLUMN_STORAGE_MODE}
     *      containing samples in rows, the last position in each row being the label of that sample.
     * @param catFeaturesInfo Information about which features are categorical in form of feature index -> number of
     *      categories.
     */
    public MatrixColumnDecisionTreeTrainerInput(SparseDistributedMatrix m, Map<Integer, Integer> catFeaturesInfo) {
        // The last column holds labels, so the feature count is columnSize() - 1 and the
        // labels-column key is built from the last column index.
        super(((SparseDistributedMatrixStorage)m.getStorage()).cache(),
            () -> Stream.of(new SparseMatrixKey(m.columnSize() - 1, m.getUUID(), m.columnSize() - 1)),
            valuesMapper(m),
            labels(m),
            keyMapper(m),
            catFeaturesInfo,
            m.columnSize() - 1,
            m.rowSize());
    }

    /** Values mapper. See {@link CacheColumnDecisionTreeTrainerInput#valuesMapper} */
    @NotNull
    private static IgniteFunction<Cache.Entry<RowColMatrixKey, Map<Integer, Double>>, Stream<IgniteBiTuple<Integer, Double>>> valuesMapper(
        SparseDistributedMatrix m) {
        return ent -> {
            // Absent cache entries are treated as an all-zero column: missing keys default to 0.0 below.
            Map<Integer, Double> map = ent.getValue() != null ? ent.getValue() : new HashMap<>();
            return IntStream.range(0, m.rowSize()).mapToObj(k -> new IgniteBiTuple<>(k, map.getOrDefault(k, 0.0)));
        };
    }

    /** Key mapper. See {@link CacheColumnDecisionTreeTrainerInput#keyMapper} */
    // NOTE(review): this uses storage.getUUID() while the constructor above uses m.getUUID() —
    // presumably the same UUID; verify to rule out a key-mismatch between the two code paths.
    @NotNull private static IgniteFunction<Integer, Stream<RowColMatrixKey>> keyMapper(SparseDistributedMatrix m) {
        return i -> Stream.of(new SparseMatrixKey(i, ((SparseDistributedMatrixStorage)m.getStorage()).getUUID(), i));
    }

    /** Labels mapper. See {@link CacheColumnDecisionTreeTrainerInput#labelsMapper} */
    @NotNull private static IgniteFunction<Map<Integer, Double>, DoubleStream> labels(SparseDistributedMatrix m) {
        return mp -> IntStream.range(0, m.rowSize()).mapToDouble(k -> mp.getOrDefault(k, 0.0));
    }

    /** {@inheritDoc} */
    @Override public Object affinityKey(int idx, Ignite ignite) {
        return idx;
    }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/RegionProjection.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/RegionProjection.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/RegionProjection.java
new file mode 100644
index 0000000..e95f57b
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/RegionProjection.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import org.apache.ignite.ml.trees.RegionInfo;
+
+/**
+ * Projection of region on given feature.
+ *
+ * @param <D> Data of region.
+ */
+public class RegionProjection<D extends RegionInfo> implements Externalizable {
+ /** Samples projections. */
+ protected Integer[] sampleIndexes;
+
+ /** Region data */
+ protected D data;
+
+ /** Depth of this region. */
+ protected int depth;
+
+ /**
+ * @param sampleIndexes Samples indexes.
+ * @param data Region data.
+ * @param depth Depth of this region.
+ */
+ public RegionProjection(Integer[] sampleIndexes, D data, int depth) {
+ this.data = data;
+ this.depth = depth;
+ this.sampleIndexes = sampleIndexes;
+ }
+
+ /**
+ * No-op constructor used for serialization/deserialization.
+ */
+ public RegionProjection() {
+ // No-op.
+ }
+
+ /**
+ * Get samples indexes.
+ *
+ * @return Samples indexes.
+ */
+ public Integer[] sampleIndexes() {
+ return sampleIndexes;
+ }
+
+ /**
+ * Get region data.
+ *
+ * @return Region data.
+ */
+ public D data() {
+ return data;
+ }
+
+ /**
+ * Get region depth.
+ *
+ * @return Region depth.
+ */
+ public int depth() {
+ return depth;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ out.writeInt(sampleIndexes.length);
+
+ for (Integer sampleIndex : sampleIndexes)
+ out.writeInt(sampleIndex);
+
+ out.writeObject(data);
+ out.writeInt(depth);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ int size = in.readInt();
+
+ sampleIndexes = new Integer[size];
+
+ for (int i = 0; i < size; i++)
+ sampleIndexes[i] = in.readInt();
+
+ data = (D)in.readObject();
+ depth = in.readInt();
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/TrainingContext.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/TrainingContext.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/TrainingContext.java
new file mode 100644
index 0000000..6415dab
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/TrainingContext.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased;
+
+import com.zaxxer.sparsebits.SparseBitSet;
+import java.util.Map;
+import java.util.UUID;
+import java.util.stream.DoubleStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.ContinuousSplitCalculator;
+import org.apache.ignite.ml.trees.RegionInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.CategoricalFeatureProcessor;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.ContinuousFeatureProcessor;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.FeatureProcessor;
+
+import static org.apache.ignite.ml.trees.trainers.columnbased.caches.FeaturesCache.COLUMN_DECISION_TREE_TRAINER_FEATURES_CACHE_NAME;
+
/**
 * Context of training with {@link ColumnDecisionTreeTrainer}.
 *
 * @param <D> Class for storing of information used in calculation of impurity of continuous feature region.
 */
public class TrainingContext<D extends ContinuousRegionInfo> {
    /** Input for training with {@link ColumnDecisionTreeTrainer}. */
    private final ColumnDecisionTreeTrainerInput input;

    /** Labels, loaded once from the input at construction time. */
    private final double[] labels;

    /** Calculator used for finding splits of region of continuous features. */
    private final ContinuousSplitCalculator<D> continuousSplitCalculator;

    /** Calculator used for finding splits of region of categorical feature. */
    private final IgniteFunction<DoubleStream, Double> categoricalSplitCalculator;

    /** UUID of current training. */
    private final UUID trainingUUID;

    /**
     * Construct context for training with {@link ColumnDecisionTreeTrainer}.
     *
     * @param input Input for training.
     * @param continuousSplitCalculator Calculator used for calculations of splits of continuous features regions.
     * @param categoricalSplitCalculator Calculator used for calculations of splits of categorical features regions.
     * @param trainingUUID UUID of the current training.
     * @param ignite Ignite instance.
     */
    public TrainingContext(ColumnDecisionTreeTrainerInput input,
        ContinuousSplitCalculator<D> continuousSplitCalculator,
        IgniteFunction<DoubleStream, Double> categoricalSplitCalculator,
        UUID trainingUUID,
        Ignite ignite) {
        this.input = input;
        this.labels = input.labels(ignite);
        this.continuousSplitCalculator = continuousSplitCalculator;
        this.categoricalSplitCalculator = categoricalSplitCalculator;
        this.trainingUUID = trainingUUID;
    }

    /**
     * Get processor used for calculating splits of categorical features.
     *
     * @param catsCnt Count of categories.
     * @return Processor used for calculating splits of categorical features.
     */
    public CategoricalFeatureProcessor categoricalFeatureProcessor(int catsCnt) {
        return new CategoricalFeatureProcessor(categoricalSplitCalculator, catsCnt);
    }

    /**
     * Get processor used for calculating splits of continuous features.
     *
     * @return Processor used for calculating splits of continuous features.
     */
    public ContinuousFeatureProcessor<D> continuousFeatureProcessor() {
        return new ContinuousFeatureProcessor<>(continuousSplitCalculator);
    }

    /**
     * Get labels.
     *
     * @return Labels.
     */
    public double[] labels() {
        return labels;
    }

    /**
     * Get values of feature with given index.
     *
     * @param featIdx Feature index.
     * @param ignite Ignite instance.
     * @return Values of feature with given index.
     *      NOTE(review): localPeek returns {@code null} when the entry is not present locally —
     *      presumably callers run co-located with the feature's affinity key; verify.
     */
    public double[] values(int featIdx, Ignite ignite) {
        IgniteCache<FeaturesCache.FeatureKey, double[]> featuresCache = ignite.getOrCreateCache(COLUMN_DECISION_TREE_TRAINER_FEATURES_CACHE_NAME);
        return featuresCache.localPeek(FeaturesCache.getFeatureCacheKey(featIdx, trainingUUID, input.affinityKey(featIdx, ignite)));
    }

    /**
     * Perform best split on the given region projection.
     *
     * @param input Input of {@link ColumnDecisionTreeTrainer} performing split.
     * @param bitSet Bit set specifying split.
     * @param targetFeatIdx Index of feature for performing split.
     * @param bestFeatIdx Index of feature with best split.
     * @param targetRegionPrj Projection of region to split on feature with index {@code targetFeatIdx}.
     * @param leftData Data of left region of split.
     * @param rightData Data of right region of split.
     * @param ignite Ignite instance.
     * @return Pair of (left, right) region projections resulting from the split.
     */
    public IgniteBiTuple<RegionProjection, RegionProjection> performSplit(ColumnDecisionTreeTrainerInput input,
        SparseBitSet bitSet, int targetFeatIdx, int bestFeatIdx, RegionProjection targetRegionPrj, RegionInfo leftData,
        RegionInfo rightData, Ignite ignite) {

        Map<Integer, Integer> catFeaturesInfo = input.catFeaturesInfo();

        // Both features continuous: typed split — unchecked (D) casts assume the caller supplied
        // D-typed region data in this case (NOTE(review): verify at call sites).
        if (!catFeaturesInfo.containsKey(targetFeatIdx) && !catFeaturesInfo.containsKey(bestFeatIdx))
            return continuousFeatureProcessor().performSplit(bitSet, targetRegionPrj, (D)leftData, (D)rightData);
        // Target feature categorical: generic split over the target feature's raw values.
        else if (catFeaturesInfo.containsKey(targetFeatIdx))
            return categoricalFeatureProcessor(catFeaturesInfo.get(targetFeatIdx)).performSplitGeneric(bitSet, values(targetFeatIdx, ignite), targetRegionPrj, leftData, rightData);
        // Remaining case (continuous target, categorical best feature): generic split over labels.
        return continuousFeatureProcessor().performSplitGeneric(bitSet, labels, targetRegionPrj, leftData, rightData);
    }

    /**
     * Processor used for calculating splits for feature with the given index.
     *
     * @param featureIdx Index of feature to process.
     * @return Categorical processor if the feature is categorical, continuous processor otherwise.
     */
    public FeatureProcessor featureProcessor(int featureIdx) {
        return input.catFeaturesInfo().containsKey(featureIdx) ? categoricalFeatureProcessor(input.catFeaturesInfo().get(featureIdx)) : continuousFeatureProcessor();
    }

    /**
     * Shortcut for affinity key.
     *
     * @param idx Feature index.
     * @return Affinity key.
     */
    public Object affinityKey(int idx) {
        return input.affinityKey(idx, Ignition.localIgnite());
    }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ContextCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ContextCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ContextCache.java
new file mode 100644
index 0000000..51ea359
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/trainers/columnbased/caches/ContextCache.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees.trainers.columnbased.caches;
+
+import java.util.UUID;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.trees.ContinuousRegionInfo;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+import org.apache.ignite.ml.trees.trainers.columnbased.TrainingContext;
+
+/**
+ * Class for operations related to cache containing training context for {@link ColumnDecisionTreeTrainer}.
+ */
+public class ContextCache {
+ /**
+ * Name of cache containing training context for {@link ColumnDecisionTreeTrainer}.
+ */
+ public static final String COLUMN_DECISION_TREE_TRAINER_CONTEXT_CACHE_NAME = "COLUMN_DECISION_TREE_TRAINER_CONTEXT_CACHE_NAME";
+
+ /**
+ * Get or create cache for training context.
+ *
+ * @param ignite Ignite instance.
+ * @param <D> Class storing information about continuous regions.
+ * @return Cache for training context.
+ */
+ public static <D extends ContinuousRegionInfo> IgniteCache<UUID, TrainingContext<D>> getOrCreate(Ignite ignite) {
+ CacheConfiguration<UUID, TrainingContext<D>> cfg = new CacheConfiguration<>();
+
+ cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
+
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ cfg.setEvictionPolicy(null);
+
+ cfg.setCopyOnRead(false);
+
+ cfg.setCacheMode(CacheMode.REPLICATED);
+
+ cfg.setOnheapCacheEnabled(true);
+
+ cfg.setReadFromBackup(true);
+
+ cfg.setName(COLUMN_DECISION_TREE_TRAINER_CONTEXT_CACHE_NAME);
+
+ return ignite.getOrCreateCache(cfg);
+ }
+}
[10/28] ignite git commit: IGNITE-6851: SQL: fixed CREATE INDEX
parsing, so that index name is now optional.
Posted by sb...@apache.org.
IGNITE-6851: SQL: fixed CREATE INDEX parsing, so that index name is now optional.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/a82ff061
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/a82ff061
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/a82ff061
Branch: refs/heads/ignite-zk
Commit: a82ff0610cd20a5f86ea3b9ef7545786df841691
Parents: 85cf958
Author: devozerov <vo...@gridgain.com>
Authored: Fri Nov 10 10:58:42 2017 +0300
Committer: devozerov <vo...@gridgain.com>
Committed: Fri Nov 10 10:58:42 2017 +0300
----------------------------------------------------------------------
.../internal/sql/command/SqlCreateIndexCommand.java | 16 +++++++++++++++-
.../internal/sql/SqlParserCreateIndexSelfTest.java | 4 ++++
2 files changed, 19 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/a82ff061/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
index 897aea5..ef89a5a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/sql/command/SqlCreateIndexCommand.java
@@ -23,6 +23,7 @@ import org.apache.ignite.internal.sql.SqlLexerToken;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
+import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.Collections;
@@ -129,7 +130,7 @@ public class SqlCreateIndexCommand implements SqlCommand {
@Override public SqlCommand parse(SqlLexer lex) {
ifNotExists = parseIfNotExists(lex);
- idxName = parseIdentifier(lex, IF);
+ idxName = parseIndexName(lex);
skipIfMatchesKeyword(lex, ON);
@@ -143,6 +144,19 @@ public class SqlCreateIndexCommand implements SqlCommand {
return this;
}
/**
 * Parse index name.
 *
 * @param lex Lexer.
 * @return Index name, or {@code null} if the name was omitted (the next token is already ON).
 */
private static @Nullable String parseIndexName(SqlLexer lex) {
    // Index name is optional in "CREATE INDEX [name] ON tbl(...)": if the lookahead token
    // is already ON, there is no name to consume.
    if (matchesKeyword(lex.lookAhead(), ON))
        return null;

    return parseIdentifier(lex, IF);
}
+
/*
* @param lex Lexer.
*/
http://git-wip-us.apache.org/repos/asf/ignite/blob/a82ff061/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
index 5de0a3a..e7bc3e9 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/sql/SqlParserCreateIndexSelfTest.java
@@ -90,6 +90,10 @@ public class SqlParserCreateIndexSelfTest extends SqlParserAbstractSelfTest {
parseValidate("schema", "CREATE INDEX idx ON tbl(a)", "schema", "TBL", "IDX", "A", false);
parseValidate("sChema", "CREATE INDEX idx ON tbl(a)", "sChema", "TBL", "IDX", "A", false);
+ // No index name.
+ parseValidate(null, "CREATE INDEX ON tbl(a)", null, "TBL", null, "A", false);
+ parseValidate(null, "CREATE INDEX ON schema.tbl(a)", "SCHEMA", "TBL", null, "A", false);
+
// NOT EXISTS
SqlCreateIndexCommand cmd;
[11/28] ignite git commit: IGNITE-6839 Delete binary meta before
tests, PDS compatibility tests improved - Fixes #2990.
Posted by sb...@apache.org.
IGNITE-6839 Delete binary meta before tests, PDS compatibility tests improved - Fixes #2990.
Signed-off-by: Alexey Goncharuk <al...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/20ec6c94
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/20ec6c94
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/20ec6c94
Branch: refs/heads/ignite-zk
Commit: 20ec6c948964efda204417a47d8fce6f128c222b
Parents: a82ff06
Author: dpavlov <dp...@gridgain.com>
Authored: Fri Nov 10 11:01:05 2017 +0300
Committer: Alexey Goncharuk <al...@gmail.com>
Committed: Fri Nov 10 11:01:05 2017 +0300
----------------------------------------------------------------------
.../DummyPersistenceCompatibilityTest.java | 225 ++++++++++++++++++-
.../FoldersReuseCompatibilityTest.java | 48 +++-
.../wal/reader/StandaloneGridKernalContext.java | 2 +-
.../db/wal/reader/IgniteWalReaderTest.java | 106 ++++-----
4 files changed, 316 insertions(+), 65 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/20ec6c94/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/DummyPersistenceCompatibilityTest.java
----------------------------------------------------------------------
diff --git a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/DummyPersistenceCompatibilityTest.java b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/DummyPersistenceCompatibilityTest.java
index 655da52..b05d5a6 100644
--- a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/DummyPersistenceCompatibilityTest.java
+++ b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/DummyPersistenceCompatibilityTest.java
@@ -17,6 +17,11 @@
package org.apache.ignite.compatibility.persistence;
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.io.Serializable;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
@@ -28,15 +33,25 @@ import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.PersistentStoreConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.cache.GridCacheAbstractFullApiSelfTest;
+import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
-/** */
+/**
+ * Saves data using a previous version of Ignite and then loads this data using the actual version.
+ */
public class DummyPersistenceCompatibilityTest extends IgnitePersistenceCompatibilityAbstractTest {
/** */
private static final String TEST_CACHE_NAME = DummyPersistenceCompatibilityTest.class.getSimpleName();
/** {@inheritDoc} */
+ @Override protected void beforeTest() throws Exception {
+ super.beforeTest();
+
+ deleteRecursively(U.resolveWorkDirectory(U.defaultWorkDirectory(), "binary_meta", false));
+ }
+
+ /** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
@@ -52,11 +67,41 @@ public class DummyPersistenceCompatibilityTest extends IgnitePersistenceCompatib
}
/**
+ * Tests opportunity to read data from previous Ignite DB version.
+ *
+ * @throws Exception If failed.
+ */
+ public void testNodeStartByOldVersionPersistenceData_2_2() throws Exception {
+ doTestStartupWithOldVersion("2.2.0");
+ }
+
+ /**
+ * Tests opportunity to read data from previous Ignite DB version.
+ *
+ * @throws Exception If failed.
+ */
+ public void testNodeStartByOldVersionPersistenceData_2_1() throws Exception {
+ doTestStartupWithOldVersion("2.1.0");
+ }
+
+ /**
+ * Tests opportunity to read data from previous Ignite DB version.
+ *
+ * @throws Exception If failed.
+ */
+ public void testNodeStartByOldVersionPersistenceData_2_3() throws Exception {
+ doTestStartupWithOldVersion("2.3.0");
+ }
+
+ /**
+ * Tests opportunity to read data from previous Ignite DB version.
+ *
+ * @param ver 3-digit version of Ignite.
* @throws Exception If failed.
*/
- public void testNodeStartByOldVersionPersistenceData() throws Exception {
+ private void doTestStartupWithOldVersion(String ver) throws Exception {
try {
- startGrid(1, "2.2.0", new ConfigurationClosure(), new PostStartupClosure());
+ startGrid(1, ver, new ConfigurationClosure(), new PostStartupClosure());
stopAllGrids();
@@ -66,10 +111,23 @@ public class DummyPersistenceCompatibilityTest extends IgnitePersistenceCompatib
ignite.active(true);
- IgniteCache<Integer, String> cache = ignite.getOrCreateCache(TEST_CACHE_NAME);
+ IgniteCache<Object, Object> cache = ignite.getOrCreateCache(TEST_CACHE_NAME);
for (int i = 0; i < 10; i++)
assertEquals("data" + i, cache.get(i));
+
+ assertEquals(cache.get("1"), "2");
+ assertEquals(cache.get(12), 2);
+ assertEquals(cache.get(13L), 2L);
+ assertEquals(cache.get(TestEnum.A), "Enum_As_Key");
+ assertEquals(cache.get("Enum_As_Value"), TestEnum.B);
+ assertEquals(cache.get(TestEnum.C), TestEnum.C);
+ assertEquals(cache.get("Serializable"), new TestSerializable(42));
+ assertEquals(cache.get(new TestSerializable(42)), "Serializable_As_Key");
+ assertEquals(cache.get("Externalizable"), new TestExternalizable(42));
+ assertEquals(cache.get(new TestExternalizable(42)), "Externalizable_As_Key");
+ assertEquals(cache.get("testStringContainer"),
+ new TestStringContainerToBePrinted("testStringContainer"));
}
finally {
stopAllGrids();
@@ -82,16 +140,28 @@ public class DummyPersistenceCompatibilityTest extends IgnitePersistenceCompatib
@Override public void apply(Ignite ignite) {
ignite.active(true);
- CacheConfiguration<Integer, String> cacheCfg = new CacheConfiguration<>();
+ CacheConfiguration<Object, Object> cacheCfg = new CacheConfiguration<>();
cacheCfg.setName(TEST_CACHE_NAME);
cacheCfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
cacheCfg.setBackups(1);
cacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
- IgniteCache<Integer, String> cache = ignite.createCache(cacheCfg);
+ IgniteCache<Object, Object> cache = ignite.createCache(cacheCfg);
for (int i = 0; i < 10; i++)
cache.put(i, "data" + i);
+
+ cache.put("1", "2");
+ cache.put(12, 2);
+ cache.put(13L, 2L);
+ cache.put(TestEnum.A, "Enum_As_Key");
+ cache.put("Enum_As_Value", TestEnum.B);
+ cache.put(TestEnum.C, TestEnum.C);
+ cache.put("Serializable", new TestSerializable(42));
+ cache.put(new TestSerializable(42), "Serializable_As_Key");
+ cache.put("Externalizable", new TestExternalizable(42));
+ cache.put(new TestExternalizable(42), "Externalizable_As_Key");
+ cache.put("testStringContainer", new TestStringContainerToBePrinted("testStringContainer"));
}
}
@@ -111,4 +181,147 @@ public class DummyPersistenceCompatibilityTest extends IgnitePersistenceCompatib
cfg.setPersistentStoreConfiguration(new PersistentStoreConfiguration());
}
}
+
+ /** Enum for cover binaryObject enum save/load. */
+ public enum TestEnum {
+ /** */A, /** */B, /** */C
+ }
+
+ /** Special class to test WAL reader resistance to Serializable interface. */
+ static class TestSerializable implements Serializable {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** I value. */
+ private int iVal;
+
+ /**
+ * Creates test object
+ *
+ * @param iVal I value.
+ */
+ TestSerializable(int iVal) {
+ this.iVal = iVal;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "TestSerializable{" +
+ "iVal=" + iVal +
+ '}';
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ TestSerializable that = (TestSerializable)o;
+
+ return iVal == that.iVal;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ return iVal;
+ }
+ }
+
+ /** Special class to test WAL reader resistance to Serializable interface. */
+ static class TestExternalizable implements Externalizable {
+ /** */
+ private static final long serialVersionUID = 0L;
+
+ /** I value. */
+ private int iVal;
+
+ /** Noop ctor for unmarshalling */
+ public TestExternalizable() {
+
+ }
+
+ /**
+ * Creates test object with provided value.
+ *
+ * @param iVal I value.
+ */
+ public TestExternalizable(int iVal) {
+ this.iVal = iVal;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "TestExternalizable{" +
+ "iVal=" + iVal +
+ '}';
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ out.writeInt(iVal);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ iVal = in.readInt();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ TestExternalizable that = ( TestExternalizable)o;
+
+ return iVal == that.iVal;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ return iVal;
+ }
+ }
+
+ /** Container class to test toString of data records. */
+ static class TestStringContainerToBePrinted {
+ /** */
+ String data;
+
+ /**
+ * Creates container.
+ *
+ * @param data value to be searched in to String.
+ */
+ public TestStringContainerToBePrinted(String data) {
+ this.data = data;
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ TestStringContainerToBePrinted printed = (TestStringContainerToBePrinted)o;
+
+ return data != null ? data.equals(printed.data) : printed.data == null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public int hashCode() {
+ return data != null ? data.hashCode() : 0;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "TestStringContainerToBePrinted{" +
+ "data='" + data + '\'' +
+ '}';
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/20ec6c94/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/FoldersReuseCompatibilityTest.java
----------------------------------------------------------------------
diff --git a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/FoldersReuseCompatibilityTest.java b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/FoldersReuseCompatibilityTest.java
index 1775013..06b96fc 100644
--- a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/FoldersReuseCompatibilityTest.java
+++ b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/FoldersReuseCompatibilityTest.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;
import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.MemoryConfiguration;
@@ -46,10 +47,13 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
private static final String CACHE_NAME = "dummy";
/** Key to store in previous version of ignite */
- private static final String KEY = "ObjectFromPast";
+ private static final String KEY = "StringFromPrevVersion";
/** Value to store in previous version of ignite */
- private static final String VAL = "ValueFromPast";
+ private static final String VAL = "ValueFromPrevVersion";
+
+ /** Key to store in previous version of ignite */
+ private static final String KEY_OBJ = "ObjectFromPrevVersion";
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
@@ -58,6 +62,13 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
}
/** {@inheritDoc} */
+ @Override protected void beforeTest() throws Exception {
+ super.beforeTest();
+
+ deleteRecursively(U.resolveWorkDirectory(U.defaultWorkDirectory(), "binary_meta", false));
+ }
+
+ /** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
final IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
@@ -72,6 +83,16 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
*
* @throws Exception if failed.
*/
+ public void ignored_testFoldersReuseCompatibility_2_3() throws Exception {
+ runFoldersReuse("2.3.0");
+ }
+
+ /**
+ * Test startup of current ignite version using DB storage folder from previous version of Ignite. Expected to start
+ * successfully with existing DB
+ *
+ * @throws Exception if failed.
+ */
public void testFoldersReuseCompatibility_2_2() throws Exception {
runFoldersReuse("2.2.0");
}
@@ -94,9 +115,8 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
* @throws Exception if failed.
*/
private void runFoldersReuse(String ver) throws Exception {
- final IgniteEx grid = startGrid(1, ver, new ConfigurationClosure(), new PostStartupClosure());
+ final IgniteEx oldVer = startGrid(1, ver, new ConfigurationClosure(), new PostStartupClosure());
- grid.close();
stopAllGrids();
IgniteEx ignite = startGrid(0);
@@ -109,6 +129,9 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
assertEquals(VAL, ignite.cache(CACHE_NAME).get(KEY));
+ final DummyPersistenceCompatibilityTest.TestStringContainerToBePrinted actual = (DummyPersistenceCompatibilityTest.TestStringContainerToBePrinted)ignite.cache(CACHE_NAME).get(KEY_OBJ);
+ assertEquals(VAL, actual.data);
+
assertNodeIndexesInFolder();// should not create any new style directories
stopAllGrids();
@@ -119,7 +142,22 @@ public class FoldersReuseCompatibilityTest extends IgnitePersistenceCompatibilit
/** {@inheritDoc} */
@Override public void apply(Ignite ignite) {
ignite.active(true);
- ignite.getOrCreateCache(CACHE_NAME).put(KEY, VAL);
+
+ final IgniteCache<Object, Object> cache = ignite.getOrCreateCache(CACHE_NAME);
+ cache.put(KEY, VAL);
+ cache.put("1", "2");
+ cache.put(1, 2);
+ cache.put(1L, 2L);
+ cache.put(DummyPersistenceCompatibilityTest.TestEnum.A, "Enum_As_Key");
+ cache.put("Enum_As_Value", DummyPersistenceCompatibilityTest.TestEnum.B);
+ cache.put(DummyPersistenceCompatibilityTest.TestEnum.C, DummyPersistenceCompatibilityTest.TestEnum.C);
+
+ cache.put("Serializable", new DummyPersistenceCompatibilityTest.TestSerializable(42));
+ cache.put(new DummyPersistenceCompatibilityTest.TestSerializable(42), "Serializable_As_Key");
+ cache.put("Externalizable", new DummyPersistenceCompatibilityTest.TestExternalizable(42));
+ cache.put(new DummyPersistenceCompatibilityTest.TestExternalizable(42), "Externalizable_As_Key");
+ cache.put(KEY_OBJ, new DummyPersistenceCompatibilityTest.TestStringContainerToBePrinted(VAL));
+
}
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/20ec6c94/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/reader/StandaloneGridKernalContext.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/reader/StandaloneGridKernalContext.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/reader/StandaloneGridKernalContext.java
index 485458b..80dfc5b 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/reader/StandaloneGridKernalContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/reader/StandaloneGridKernalContext.java
@@ -94,7 +94,7 @@ import org.jetbrains.annotations.Nullable;
* Dummy grid kernal context
*/
public class StandaloneGridKernalContext implements GridKernalContext {
- /** Binary metadata file store folderÑŽ */
+ /** Binary metadata file store folder. */
public static final String BINARY_META_FOLDER = "binary_meta";
/** Config for fake Ignite instance. */
http://git-wip-us.apache.org/repos/asf/ignite/blob/20ec6c94/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/reader/IgniteWalReaderTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/reader/IgniteWalReaderTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/reader/IgniteWalReaderTest.java
index 9348a68..1844bfe 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/reader/IgniteWalReaderTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/reader/IgniteWalReaderTest.java
@@ -98,17 +98,17 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
/** Cache name. */
private static final String CACHE_NAME = "cache0";
- /** additional cache for testing different combinations of types in WAL */
+ /** additional cache for testing different combinations of types in WAL. */
private static final String CACHE_ADDL_NAME = "cache1";
- /** Dump records to logger. Should be false for non local run */
+ /** Dump records to logger. Should be false for non local run. */
private static final boolean dumpRecords = false;
- /** Page size to set */
+ /** Page size to set. */
public static final int PAGE_SIZE = 4 * 1024;
/**
- * Field for transferring setting from test to getConfig method
+ * Field for transferring setting from test to getConfig method.
* Archive incomplete segment after inactivity milliseconds.
*/
private int archiveIncompleteSegmentAfterInactivityMs;
@@ -116,7 +116,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
/** Custom wal mode. */
private WALMode customWalMode;
- /** Clear properties in afterTest method() */
+ /** Clear properties in afterTest() method. */
private boolean clearProperties;
/** {@inheritDoc} */
@@ -237,23 +237,23 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Iterates on records and closes iterator
+ * Iterates on records and closes iterator.
*
- * @param walIter iterator to count, will be closed
- * @return count of records
- * @throws IgniteCheckedException if failed to iterate
+ * @param walIter iterator to count, will be closed.
+ * @return count of records.
+ * @throws IgniteCheckedException if failed to iterate.
*/
private int iterateAndCount(WALIterator walIter) throws IgniteCheckedException {
return iterateAndCount(walIter, true);
}
/**
- * Iterates on records and closes iterator
+ * Iterates on records and closes iterator.
*
- * @param walIter iterator to count, will be closed
- * @param touchEntries access data within entries
- * @return count of records
- * @throws IgniteCheckedException if failed to iterate
+ * @param walIter iterator to count, will be closed.
+ * @param touchEntries access data within entries.
+ * @return count of records.
+ * @throws IgniteCheckedException if failed to iterate.
*/
private int iterateAndCount(WALIterator walIter, boolean touchEntries) throws IgniteCheckedException {
int cnt = 0;
@@ -280,9 +280,9 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Tests archive completed event is fired
+ * Tests archive completed event is fired.
*
- * @throws Exception if failed
+ * @throws Exception if failed.
*/
public void testArchiveCompletedEventFired() throws Exception {
final AtomicBoolean evtRecorded = new AtomicBoolean();
@@ -315,10 +315,10 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Puts provided number of records to fill WAL
+ * Puts provided number of records to fill WAL.
*
- * @param ignite ignite instance
- * @param recordsToWrite count
+ * @param ignite ignite instance.
+ * @param recordsToWrite count.
*/
private void putDummyRecords(Ignite ignite, int recordsToWrite) {
IgniteCache<Object, Object> cache0 = ignite.cache(CACHE_NAME);
@@ -328,10 +328,10 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Puts provided number of records to fill WAL
+ * Puts provided number of records to fill WAL.
*
- * @param ignite ignite instance
- * @param recordsToWrite count
+ * @param ignite ignite instance.
+ * @param recordsToWrite count.
*/
private void putAllDummyRecords(Ignite ignite, int recordsToWrite) {
IgniteCache<Object, Object> cache0 = ignite.cache(CACHE_NAME);
@@ -345,11 +345,11 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Puts provided number of records to fill WAL under transactions
+ * Puts provided number of records to fill WAL under transactions.
*
- * @param ignite ignite instance
- * @param recordsToWrite count
- * @param txCnt transactions to run. If number is less then records count, txCnt records will be written
+ * @param ignite ignite instance.
+ * @param recordsToWrite count.
+ * @param txCnt transactions to run. If number is less than records count, txCnt records will be written.
*/
private IgniteCache<Object, Object> txPutDummyRecords(Ignite ignite, int recordsToWrite, int txCnt) {
IgniteCache<Object, Object> cache0 = ignite.cache(CACHE_NAME);
@@ -368,9 +368,9 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Tests time out based WAL segment archiving
+ * Tests time out based WAL segment archiving.
*
- * @throws Exception if failure occurs
+ * @throws Exception if failure occurs.
*/
public void testArchiveIncompleteSegmentAfterInactivity() throws Exception {
final AtomicBoolean waitingForEvt = new AtomicBoolean();
@@ -410,12 +410,12 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Removes entry by key and value from map (java 8 map method copy)
+ * Removes entry by key and value from map (Java 8 map method copy).
*
* @param m map to remove from.
* @param key key to remove.
* @param val value to remove.
- * @return true if remove was successful
+ * @return true if remove was successful.
*/
private boolean remove(Map m, Object key, Object val) {
Object curVal = m.get(key);
@@ -427,7 +427,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Places records under transaction, checks its value using WAL
+ * Places records under transaction, checks its value using WAL.
*
* @throws Exception if failed.
*/
@@ -481,11 +481,11 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Generates DB subfolder name for provided node index (local) and UUID (consistent ID)
+ * Generates DB subfolder name for provided node index (local) and UUID (consistent ID).
*
* @param ignite ignite instance.
* @param nodeIdx node index.
- * @return folder file name
+ * @return folder file name.
*/
@NotNull private String genDbSubfolderName(Ignite ignite, int nodeIdx) {
return genNewStyleSubfolderName(nodeIdx, (UUID)ignite.cluster().localNode().consistentId());
@@ -500,7 +500,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
* @param minCntEntries minimum expected entries count to find.
* @param minTxCnt minimum expected transaction count to find.
* @param objConsumer object handler, called for each object found in logical data records.
- * @param dataRecordHnd data handler record
+ * @param dataRecordHnd data handler record.
* @throws IgniteCheckedException if failed.
*/
private void scanIterateAndCount(
@@ -600,9 +600,9 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
ctrlMap.put(next.getKey(), next.getValue());
}
- for (Cache.Entry<Object, Object> next : addlCache) {
- ctrlMapForBinaryObjects.put(next.getKey(), next.getValue());
- }
+ for (Cache.Entry<Object, Object> next : addlCache) {
+ ctrlMapForBinaryObjects.put(next.getKey(), next.getValue());
+ }
final String subfolderName = genDbSubfolderName(ignite0, 0);
@@ -724,9 +724,9 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Tests archive completed event is fired
+ * Tests archive completed event is fired.
*
- * @throws Exception if failed
+ * @throws Exception if failed.
*/
public void testFillWalForExactSegmentsCount() throws Exception {
customWalMode = WALMode.DEFAULT;
@@ -773,7 +773,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Tests reading of empty WAL from non filled cluster
+ * Tests reading of empty WAL from non filled cluster.
*
* @throws Exception if failed.
*/
@@ -1033,8 +1033,8 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * @param values collection with numbers
- * @return sum of numbers
+ * @param values collection with numbers.
+ * @return sum of numbers.
*/
private int valuesSum(Iterable<Integer> values) {
int sum = 0;
@@ -1046,11 +1046,11 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Iterates over data records, checks each DataRecord and its entries, finds out all transactions in WAL
+ * Iterates over data records, checks each DataRecord and its entries, finds out all transactions in WAL.
*
- * @param walIter iterator to use
- * @return count of data records observed for each global TX ID. Contains null for non tx updates
- * @throws IgniteCheckedException if failure
+ * @param walIter iterator to use.
+ * @return count of data records observed for each global TX ID. Contains null for non tx updates.
+ * @throws IgniteCheckedException if failure.
*/
private Map<GridCacheVersion, Integer> iterateAndCountDataRecord(
final WALIterator walIter,
@@ -1119,12 +1119,12 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
return entriesUnderTxFound;
}
- /** Enum for cover binaryObject enum save/load */
+ /** Enum for cover binaryObject enum save/load. */
enum TestEnum {
/** */A, /** */B, /** */C
}
- /** Special class to test WAL reader resistance to Serializable interface */
+ /** Special class to test WAL reader resistance to Serializable interface. */
static class TestSerializable implements Serializable {
/** */
private static final long serialVersionUID = 0L;
@@ -1133,7 +1133,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
private int iVal;
/**
- * Creates test object
+ * Creates test object.
*
* @param iVal I value.
*/
@@ -1166,7 +1166,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
}
- /** Special class to test WAL reader resistance to Serializable interface */
+ /** Special class to test WAL reader resistance to Serializable interface. */
static class TestExternalizable implements Externalizable {
/** */
private static final long serialVersionUID = 0L;
@@ -1180,7 +1180,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
/**
- * Creates test object with provided value
+ * Creates test object with provided value.
*
* @param iVal I value.
*/
@@ -1223,7 +1223,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
}
- /** Container class to test toString of data records */
+ /** Container class to test toString of data records. */
static class TestStringContainerToBePrinted {
/** */
private String data;
@@ -1262,7 +1262,7 @@ public class IgniteWalReaderTest extends GridCommonAbstractTest {
}
}
- /** Test class for storing in ignite */
+ /** Test class for storing in ignite. */
private static class Organization {
/** Key. */
private final int key;
[25/28] ignite git commit: IGNITE-6818 Handle half open connection in
communication.
Posted by sb...@apache.org.
IGNITE-6818 Handle half open connection in communication.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/191295d4
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/191295d4
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/191295d4
Branch: refs/heads/ignite-zk
Commit: 191295d45f53225d9e1e214c6fdd85b59e80d0ec
Parents: 132ec3f
Author: dkarachentsev <dk...@gridgain.com>
Authored: Mon Nov 13 10:35:21 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Mon Nov 13 10:35:21 2017 +0300
----------------------------------------------------------------------
.../communication/tcp/TcpCommunicationSpi.java | 37 +++--
...ommunicationSpiHalfOpenedConnectionTest.java | 142 +++++++++++++++++++
.../IgniteSpiCommunicationSelfTestSuite.java | 2 +
3 files changed, 168 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/191295d4/modules/core/src/main/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpi.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpi.java b/modules/core/src/main/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpi.java
index 1bff8ee..49425ce 100755
--- a/modules/core/src/main/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpi.java
+++ b/modules/core/src/main/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpi.java
@@ -539,15 +539,7 @@ public class TcpCommunicationSpi extends IgniteSpiAdapter implements Communicati
if (c.failed) {
ses.send(new RecoveryLastReceivedMessage(ALREADY_CONNECTED));
- for (GridNioSession ses0 : nioSrvr.sessions()) {
- ConnectionKey key0 = ses0.meta(CONN_IDX_META);
-
- if (ses0.accepted() && key0 != null &&
- key0.nodeId().equals(connKey.nodeId()) &&
- key0.connectionIndex() == connKey.connectionIndex() &&
- key0.connectCount() < connKey.connectCount())
- ses0.close();
- }
+ closeStaleConnections(connKey);
}
}
}
@@ -567,11 +559,13 @@ public class TcpCommunicationSpi extends IgniteSpiAdapter implements Communicati
if (oldClient instanceof GridTcpNioCommunicationClient) {
if (log.isInfoEnabled())
log.info("Received incoming connection when already connected " +
- "to this node, rejecting [locNode=" + locNode.id() +
- ", rmtNode=" + sndId + ']');
+ "to this node, rejecting [locNode=" + locNode.id() +
+ ", rmtNode=" + sndId + ']');
ses.send(new RecoveryLastReceivedMessage(ALREADY_CONNECTED));
+ closeStaleConnections(connKey);
+
return;
}
else {
@@ -599,11 +593,13 @@ public class TcpCommunicationSpi extends IgniteSpiAdapter implements Communicati
if (log.isInfoEnabled())
log.info("Received incoming connection when already connected " +
- "to this node, rejecting [locNode=" + locNode.id() +
- ", rmtNode=" + sndId + ']');
+ "to this node, rejecting [locNode=" + locNode.id() +
+ ", rmtNode=" + sndId + ']');
ses.send(new RecoveryLastReceivedMessage(ALREADY_CONNECTED));
+ closeStaleConnections(connKey);
+
fut.onDone(oldClient);
return;
@@ -658,6 +654,21 @@ public class TcpCommunicationSpi extends IgniteSpiAdapter implements Communicati
}
}
+ /**
+ * @param connKey Connection key.
+ */
+ private void closeStaleConnections(ConnectionKey connKey) {
+ for (GridNioSession ses0 : nioSrvr.sessions()) {
+ ConnectionKey key0 = ses0.meta(CONN_IDX_META);
+
+ if (ses0.accepted() && key0 != null &&
+ key0.nodeId().equals(connKey.nodeId()) &&
+ key0.connectionIndex() == connKey.connectionIndex() &&
+ key0.connectCount() < connKey.connectCount())
+ ses0.close();
+ }
+ }
+
@Override public void onMessage(final GridNioSession ses, Message msg) {
ConnectionKey connKey = ses.meta(CONN_IDX_META);
http://git-wip-us.apache.org/repos/asf/ignite/blob/191295d4/modules/core/src/test/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpiHalfOpenedConnectionTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpiHalfOpenedConnectionTest.java b/modules/core/src/test/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpiHalfOpenedConnectionTest.java
new file mode 100644
index 0000000..3e10f94
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/spi/communication/tcp/TcpCommunicationSpiHalfOpenedConnectionTest.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.communication.tcp;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentMap;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.cluster.ClusterGroup;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.util.nio.GridCommunicationClient;
+import org.apache.ignite.internal.util.nio.GridNioRecoveryDescriptor;
+import org.apache.ignite.internal.util.nio.GridNioServerListener;
+import org.apache.ignite.internal.util.nio.GridTcpNioCommunicationClient;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.plugin.extensions.communication.Message;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+/**
+ * Tests case when connection is closed only for one side, when other is not notified.
+ */
+public class TcpCommunicationSpiHalfOpenedConnectionTest extends GridCommonAbstractTest {
+ /** Client spi. */
+ private TcpCommunicationSpi clientSpi;
+
+ /** Paired connections. */
+ private boolean pairedConnections;
+
+ /** {@inheritDoc} */
+ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
+ IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
+
+ if (igniteInstanceName.contains("client")) {
+ cfg.setClientMode(true);
+
+ clientSpi = (TcpCommunicationSpi)cfg.getCommunicationSpi();
+ }
+
+ ((TcpCommunicationSpi)cfg.getCommunicationSpi()).setUsePairedConnections(pairedConnections);
+
+ return cfg;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void afterTest() throws Exception {
+ stopAllGrids(true);
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testReconnect() throws Exception {
+ pairedConnections = false;
+
+ checkReconnect();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ public void testReconnectPaired() throws Exception {
+ pairedConnections = true;
+
+ checkReconnect();
+ }
+
+ /**
+ * @throws Exception If failed.
+ */
+ private void checkReconnect() throws Exception {
+ Ignite srv = startGrid("server");
+ Ignite client = startGrid("client");
+
+ UUID nodeId = srv.cluster().localNode().id();
+
+ System.out.println(">> Server ID: " + nodeId);
+
+ ClusterGroup srvGrp = client.cluster().forNodeId(nodeId);
+
+ System.out.println(">> Send job");
+
+ // Establish connection
+ client.compute(srvGrp).run(F.noop());
+
+ ConcurrentMap<UUID, GridCommunicationClient[]> clients = U.field(clientSpi, "clients");
+ ConcurrentMap<?, GridNioRecoveryDescriptor> recoveryDescs = U.field(clientSpi, "recoveryDescs");
+ ConcurrentMap<?, GridNioRecoveryDescriptor> outRecDescs = U.field(clientSpi, "outRecDescs");
+ ConcurrentMap<?, GridNioRecoveryDescriptor> inRecDescs = U.field(clientSpi, "inRecDescs");
+ GridNioServerListener<Message> lsnr = U.field(clientSpi, "srvLsnr");
+
+ Iterator<GridNioRecoveryDescriptor> it = F.concat(
+ recoveryDescs.values().iterator(),
+ outRecDescs.values().iterator(),
+ inRecDescs.values().iterator()
+ );
+
+ while (it.hasNext()) {
+ GridNioRecoveryDescriptor desc = it.next();
+
+ // Need to simulate connection close in GridNioServer as it
+ // releases descriptors on disconnect.
+ desc.release();
+ }
+
+ // Remove client to avoid calling close(), in that case server
+ // will close connection too, but we want to keep the server
+ // uninformed and force ping old connection.
+ GridCommunicationClient[] clients0 = clients.remove(nodeId);
+
+ for (GridCommunicationClient commClient : clients0)
+ lsnr.onDisconnected(((GridTcpNioCommunicationClient)commClient).session(), new IOException("Test exception"));
+
+ info(">> Removed client");
+
+ // Reestablish connection
+ client.compute(srvGrp).run(F.noop());
+
+ info(">> Sent second job");
+ }
+
+ /** {@inheritDoc} */
+ @Override protected long getTestTimeout() {
+ return 30_000;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/191295d4/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiCommunicationSelfTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiCommunicationSelfTestSuite.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiCommunicationSelfTestSuite.java
index 77de3fc..8e96a3f 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiCommunicationSelfTestSuite.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteSpiCommunicationSelfTestSuite.java
@@ -38,6 +38,7 @@ import org.apache.ignite.spi.communication.tcp.GridTcpCommunicationSpiTcpSelfTes
import org.apache.ignite.spi.communication.tcp.IgniteTcpCommunicationRecoveryAckClosureSelfTest;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpiDropNodesTest;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpiFaultyClientTest;
+import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpiHalfOpenedConnectionTest;
/**
* Test suite for all communication SPIs.
@@ -78,6 +79,7 @@ public class IgniteSpiCommunicationSelfTestSuite extends TestSuite {
suite.addTest(new TestSuite(TcpCommunicationSpiFaultyClientTest.class));
suite.addTest(new TestSuite(TcpCommunicationSpiDropNodesTest.class));
+ suite.addTest(new TestSuite(TcpCommunicationSpiHalfOpenedConnectionTest.class));
return suite;
}
[23/28] ignite git commit: IGNITE-6824 Add step validator support to
pcScaleNumber directive,
add "step" validation message to pc-form-field-size pug mixin.
Posted by sb...@apache.org.
IGNITE-6824 Add step validator support to pcScaleNumber directive, add "step" validation message to pc-form-field-size pug mixin.
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/250ceb72
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/250ceb72
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/250ceb72
Branch: refs/heads/ignite-zk
Commit: 250ceb726d38631282599fb39b3990e66a667922
Parents: 78a8403
Author: Ilya Borisov <ib...@gridgain.com>
Authored: Mon Nov 13 13:29:47 2017 +0700
Committer: Andrey Novikov <an...@gridgain.com>
Committed: Mon Nov 13 13:29:47 2017 +0700
----------------------------------------------------------------------
.../components/page-configure-basic/components/pcbScaleNumber.js | 1 +
.../components/page-configure-basic/mixins/pcb-form-field-size.pug | 1 +
2 files changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/250ceb72/modules/web-console/frontend/app/components/page-configure-basic/components/pcbScaleNumber.js
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/components/page-configure-basic/components/pcbScaleNumber.js b/modules/web-console/frontend/app/components/page-configure-basic/components/pcbScaleNumber.js
index 93d87ba..663d631 100644
--- a/modules/web-console/frontend/app/components/page-configure-basic/components/pcbScaleNumber.js
+++ b/modules/web-console/frontend/app/components/page-configure-basic/components/pcbScaleNumber.js
@@ -28,6 +28,7 @@ export default function pcbScaleNumber() {
ngModel.$parsers.push(down);
ngModel.$validators.min = wrap(ngModel.$validators.min)(up);
ngModel.$validators.max = wrap(ngModel.$validators.max)(up);
+ ngModel.$validators.step = wrap(ngModel.$validators.step)(up);
scope.$watch(attr.pcbScaleNumber, (value, old) => {
factor = Number(value);
http://git-wip-us.apache.org/repos/asf/ignite/blob/250ceb72/modules/web-console/frontend/app/components/page-configure-basic/mixins/pcb-form-field-size.pug
----------------------------------------------------------------------
diff --git a/modules/web-console/frontend/app/components/page-configure-basic/mixins/pcb-form-field-size.pug b/modules/web-console/frontend/app/components/page-configure-basic/mixins/pcb-form-field-size.pug
index fccd6ca..0cd5d01 100644
--- a/modules/web-console/frontend/app/components/page-configure-basic/mixins/pcb-form-field-size.pug
+++ b/modules/web-console/frontend/app/components/page-configure-basic/mixins/pcb-form-field-size.pug
@@ -63,6 +63,7 @@ mixin pcb-form-field-size(label, model, name, disabled, required, placeholder, m
+pcb-form-field-feedback(form, name, 'min', `Value is less than allowable minimum: ${min}`)
+pcb-form-field-feedback(form, name, 'max', `Value is more than allowable maximum: ${max}`)
+pcb-form-field-feedback(form, name, 'number', 'Only numbers allowed')
+ +pcb-form-field-feedback(form, name, 'step', 'Step is invalid')
.input-tip
+pcb-form-field-input(attributes=attributes)
[20/28] ignite git commit: IGNITE-5218: First version of decision
trees. This closes #2936
Posted by sb...@apache.org.
IGNITE-5218: First version of decision trees.
This closes #2936
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/db7697b1
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/db7697b1
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/db7697b1
Branch: refs/heads/ignite-zk
Commit: db7697b17cf6eb94754edb2b5e200655a3610dc1
Parents: 6579e69
Author: Artem Malykh <am...@gridgain.com>
Authored: Fri Nov 10 18:03:33 2017 +0300
Committer: Igor Sapego <is...@gridgain.com>
Committed: Fri Nov 10 18:03:33 2017 +0300
----------------------------------------------------------------------
.gitignore | 2 +
examples/pom.xml | 5 +
.../examples/ml/math/trees/MNISTExample.java | 261 +++++++++
.../examples/ml/math/trees/package-info.java | 22 +
modules/ml/licenses/netlib-java-bsd3.txt | 51 ++
modules/ml/pom.xml | 12 +-
.../main/java/org/apache/ignite/ml/Model.java | 4 +-
.../main/java/org/apache/ignite/ml/Trainer.java | 30 +
.../clustering/KMeansDistributedClusterer.java | 19 +-
.../apache/ignite/ml/estimators/Estimators.java | 50 ++
.../ignite/ml/estimators/package-info.java | 22 +
.../ignite/ml/math/distributed/CacheUtils.java | 178 +++++-
.../math/distributed/keys/MatrixCacheKey.java | 6 +-
.../distributed/keys/impl/BlockMatrixKey.java | 17 +-
.../distributed/keys/impl/SparseMatrixKey.java | 59 +-
.../ignite/ml/math/functions/Functions.java | 38 ++
.../ml/math/functions/IgniteBinaryOperator.java | 29 +
.../math/functions/IgniteCurriedBiFunction.java | 29 +
.../ml/math/functions/IgniteSupplier.java | 30 +
.../math/functions/IgniteToDoubleFunction.java | 25 +
.../matrix/SparseBlockDistributedMatrix.java | 4 +-
.../impls/matrix/SparseDistributedMatrix.java | 3 +-
.../storage/matrix/BlockMatrixStorage.java | 12 +-
.../matrix/SparseDistributedMatrixStorage.java | 17 +-
.../ignite/ml/structures/LabeledVector.java | 63 +++
.../ml/structures/LabeledVectorDouble.java | 46 ++
.../ignite/ml/structures/package-info.java | 22 +
.../ignite/ml/trees/CategoricalRegionInfo.java | 72 +++
.../ignite/ml/trees/CategoricalSplitInfo.java | 68 +++
.../ignite/ml/trees/ContinuousRegionInfo.java | 74 +++
.../ml/trees/ContinuousSplitCalculator.java | 50 ++
.../org/apache/ignite/ml/trees/RegionInfo.java | 62 +++
.../ml/trees/models/DecisionTreeModel.java | 44 ++
.../ignite/ml/trees/models/package-info.java | 22 +
.../ml/trees/nodes/CategoricalSplitNode.java | 50 ++
.../ml/trees/nodes/ContinuousSplitNode.java | 56 ++
.../ignite/ml/trees/nodes/DecisionTreeNode.java | 33 ++
.../org/apache/ignite/ml/trees/nodes/Leaf.java | 49 ++
.../apache/ignite/ml/trees/nodes/SplitNode.java | 100 ++++
.../ignite/ml/trees/nodes/package-info.java | 22 +
.../apache/ignite/ml/trees/package-info.java | 22 +
.../ml/trees/trainers/columnbased/BiIndex.java | 113 ++++
...exedCacheColumnDecisionTreeTrainerInput.java | 57 ++
.../CacheColumnDecisionTreeTrainerInput.java | 142 +++++
.../columnbased/ColumnDecisionTreeTrainer.java | 557 +++++++++++++++++++
.../ColumnDecisionTreeTrainerInput.java | 55 ++
.../MatrixColumnDecisionTreeTrainerInput.java | 82 +++
.../trainers/columnbased/RegionProjection.java | 109 ++++
.../trainers/columnbased/TrainingContext.java | 166 ++++++
.../columnbased/caches/ContextCache.java | 68 +++
.../columnbased/caches/FeaturesCache.java | 151 +++++
.../columnbased/caches/ProjectionsCache.java | 284 ++++++++++
.../trainers/columnbased/caches/SplitCache.java | 206 +++++++
.../ContinuousSplitCalculators.java | 34 ++
.../contsplitcalcs/GiniSplitCalculator.java | 234 ++++++++
.../contsplitcalcs/VarianceSplitCalculator.java | 179 ++++++
.../contsplitcalcs/package-info.java | 22 +
.../trainers/columnbased/package-info.java | 22 +
.../columnbased/regcalcs/RegionCalculators.java | 85 +++
.../columnbased/regcalcs/package-info.java | 22 +
.../vectors/CategoricalFeatureProcessor.java | 211 +++++++
.../vectors/ContinuousFeatureProcessor.java | 111 ++++
.../vectors/ContinuousSplitInfo.java | 54 ++
.../columnbased/vectors/FeatureProcessor.java | 81 +++
.../vectors/FeatureVectorProcessorUtils.java | 57 ++
.../columnbased/vectors/SampleInfo.java | 80 +++
.../trainers/columnbased/vectors/SplitInfo.java | 106 ++++
.../columnbased/vectors/package-info.java | 22 +
.../org/apache/ignite/ml/util/MnistUtils.java | 121 ++++
.../java/org/apache/ignite/ml/util/Utils.java | 53 ++
.../org/apache/ignite/ml/IgniteMLTestSuite.java | 4 +-
.../java/org/apache/ignite/ml/TestUtils.java | 15 +
.../SparseDistributedBlockMatrixTest.java | 1 +
.../ignite/ml/trees/BaseDecisionTreeTest.java | 70 +++
.../ml/trees/ColumnDecisionTreeTrainerTest.java | 190 +++++++
.../ignite/ml/trees/DecisionTreesTestSuite.java | 33 ++
.../ml/trees/GiniSplitCalculatorTest.java | 141 +++++
.../ignite/ml/trees/SplitDataGenerator.java | 390 +++++++++++++
.../ml/trees/VarianceSplitCalculatorTest.java | 84 +++
.../ColumnDecisionTreeTrainerBenchmark.java | 455 +++++++++++++++
.../trees/columntrees.manualrun.properties | 5 +
81 files changed, 6538 insertions(+), 114 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index d8dd951..18146f8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -89,3 +89,5 @@ packages
/modules/platforms/cpp/odbc-test/ignite-odbc-tests
/modules/platforms/cpp/stamp-h1
+#Files related to ML manual-runnable tests
+/modules/ml/src/test/resources/manualrun/trees/columntrees.manualrun.properties
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/pom.xml b/examples/pom.xml
index 30d23ae..2b95e65 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -248,6 +248,11 @@
<artifactId>ignite-ml</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.2</version>
+ </dependency>
</dependencies>
</profile>
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/MNISTExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/MNISTExample.java b/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/MNISTExample.java
new file mode 100644
index 0000000..6aaadd9
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/MNISTExample.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.math.trees;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.function.Function;
+import java.util.stream.Stream;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.IgniteDataStreamer;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.examples.ExampleNodeStartup;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.Model;
+import org.apache.ignite.ml.estimators.Estimators;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteTriFunction;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.ml.trees.models.DecisionTreeModel;
+import org.apache.ignite.ml.trees.trainers.columnbased.BiIndex;
+import org.apache.ignite.ml.trees.trainers.columnbased.BiIndexedCacheColumnDecisionTreeTrainerInput;
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.ContinuousSplitCalculators;
+import org.apache.ignite.ml.trees.trainers.columnbased.contsplitcalcs.GiniSplitCalculator;
+import org.apache.ignite.ml.trees.trainers.columnbased.regcalcs.RegionCalculators;
+import org.apache.ignite.ml.util.MnistUtils;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * <p>
+ * Example of usage of decision trees algorithm for MNIST dataset
+ * (it can be found here: http://yann.lecun.com/exdb/mnist/). </p>
+ * <p>
+ * Remote nodes should always be started with special configuration file which
+ * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.</p>
+ * <p>
+ * Alternatively you can run {@link ExampleNodeStartup} in another JVM which will start node
+ * with {@code examples/config/example-ignite.xml} configuration.</p>
+ * <p>
+ * It is recommended to start at least one node prior to launching this example if you intend
+ * to run it with default memory settings.</p>
+ * <p>
+ * This example should be run with program arguments, for example
+ * -ts_i /path/to/train-images-idx3-ubyte
+ * -ts_l /path/to/train-labels-idx1-ubyte
+ * -tss_i /path/to/t10k-images-idx3-ubyte
+ * -tss_l /path/to/t10k-labels-idx1-ubyte
+ * -cfg examples/config/example-ignite.xml.</p>
+ * <p>
+ * -ts_i specifies path to training set images of MNIST;
+ * -ts_l specifies path to training set labels of MNIST;
+ * -tss_i specifies path to test set images of MNIST;
+ * -tss_l specifies path to test set labels of MNIST;
+ * -cfg specifies path to a config file.</p>
+ */
+public class MNISTExample {
+ /** Name of parameter specifying path to training set images. */
+ private static final String MNIST_TRAINING_IMAGES_PATH = "ts_i";
+
+ /** Name of parameter specifying path to training set labels. */
+ private static final String MNIST_TRAINING_LABELS_PATH = "ts_l";
+
+ /** Name of parameter specifying path to test set images. */
+ private static final String MNIST_TEST_IMAGES_PATH = "tss_i";
+
+ /** Name of parameter specifying path to test set labels. */
+ private static final String MNIST_TEST_LABELS_PATH = "tss_l";
+
+ /** Name of parameter specifying path of Ignite config. */
+ private static final String CONFIG = "cfg";
+
+ /** Default config path. */
+ private static final String DEFAULT_CONFIG = "examples/config/example-ignite.xml";
+
+ /**
+ * Launches example.
+ *
+ * @param args Program arguments.
+ */
+ public static void main(String[] args) {
+ String igniteCfgPath;
+
+ CommandLineParser parser = new BasicParser();
+
+ String trainingImagesPath;
+ String trainingLabelsPath;
+
+ String testImagesPath;
+ String testLabelsPath;
+
+ try {
+ // Parse the command line arguments.
+ CommandLine line = parser.parse(buildOptions(), args);
+
+ trainingImagesPath = line.getOptionValue(MNIST_TRAINING_IMAGES_PATH);
+ trainingLabelsPath = line.getOptionValue(MNIST_TRAINING_LABELS_PATH);
+ testImagesPath = line.getOptionValue(MNIST_TEST_IMAGES_PATH);
+ testLabelsPath = line.getOptionValue(MNIST_TEST_LABELS_PATH);
+ igniteCfgPath = line.getOptionValue(CONFIG, DEFAULT_CONFIG);
+ }
+ catch (ParseException e) {
+ e.printStackTrace();
+ return;
+ }
+
+ try (Ignite ignite = Ignition.start(igniteCfgPath)) {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+ int ptsCnt = 60000;
+ int featCnt = 28 * 28;
+
+ Stream<DenseLocalOnHeapVector> trainingMnistStream = MnistUtils.mnist(trainingImagesPath, trainingLabelsPath, new Random(123L), ptsCnt);
+ Stream<DenseLocalOnHeapVector> testMnistStream = MnistUtils.mnist(testImagesPath, testLabelsPath, new Random(123L), 10_000);
+
+ IgniteCache<BiIndex, Double> cache = createBiIndexedCache(ignite);
+
+ loadVectorsIntoBiIndexedCache(cache.getName(), trainingMnistStream.iterator(), featCnt + 1, ignite);
+
+ ColumnDecisionTreeTrainer<GiniSplitCalculator.GiniData> trainer =
+ new ColumnDecisionTreeTrainer<>(10, ContinuousSplitCalculators.GINI.apply(ignite), RegionCalculators.GINI, RegionCalculators.MOST_COMMON, ignite);
+
+ System.out.println(">>> Training started");
+ long before = System.currentTimeMillis();
+ DecisionTreeModel mdl = trainer.train(new BiIndexedCacheColumnDecisionTreeTrainerInput(cache, new HashMap<>(), ptsCnt, featCnt));
+ System.out.println(">>> Training finished in " + (System.currentTimeMillis() - before));
+
+ IgniteTriFunction<Model<Vector, Double>, Stream<IgniteBiTuple<Vector, Double>>, Function<Double, Double>, Double> mse = Estimators.errorsPercentage();
+ Double accuracy = mse.apply(mdl, testMnistStream.map(v -> new IgniteBiTuple<>(v.viewPart(0, featCnt), v.getX(featCnt))), Function.identity());
+ System.out.println(">>> Errs percentage: " + accuracy);
+ }
+ catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Build cli options.
+ */
+ @NotNull private static Options buildOptions() {
+ Options options = new Options();
+
+ Option trsImagesPathOpt = OptionBuilder.withArgName(MNIST_TRAINING_IMAGES_PATH).withLongOpt(MNIST_TRAINING_IMAGES_PATH).hasArg()
+ .withDescription("Path to the MNIST training set.")
+ .isRequired(true).create();
+
+ Option trsLabelsPathOpt = OptionBuilder.withArgName(MNIST_TRAINING_LABELS_PATH).withLongOpt(MNIST_TRAINING_LABELS_PATH).hasArg()
+ .withDescription("Path to the MNIST training set.")
+ .isRequired(true).create();
+
+ Option tssImagesPathOpt = OptionBuilder.withArgName(MNIST_TEST_IMAGES_PATH).withLongOpt(MNIST_TEST_IMAGES_PATH).hasArg()
+ .withDescription("Path to the MNIST test set.")
+ .isRequired(true).create();
+
+ Option tssLabelsPathOpt = OptionBuilder.withArgName(MNIST_TEST_LABELS_PATH).withLongOpt(MNIST_TEST_LABELS_PATH).hasArg()
+ .withDescription("Path to the MNIST test set.")
+ .isRequired(true).create();
+
+ Option configOpt = OptionBuilder.withArgName(CONFIG).withLongOpt(CONFIG).hasArg()
+ .withDescription("Path to the config.")
+ .isRequired(false).create();
+
+ options.addOption(trsImagesPathOpt);
+ options.addOption(trsLabelsPathOpt);
+ options.addOption(tssImagesPathOpt);
+ options.addOption(tssLabelsPathOpt);
+ options.addOption(configOpt);
+
+ return options;
+ }
+
+ /**
+ * Creates cache where data for training is stored.
+ *
+ * @param ignite Ignite instance.
+ * @return cache where data for training is stored.
+ */
+ private static IgniteCache<BiIndex, Double> createBiIndexedCache(Ignite ignite) {
+ CacheConfiguration<BiIndex, Double> cfg = new CacheConfiguration<>();
+
+ // Write to primary.
+ cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+ // Atomic transactions only.
+ cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+ // No eviction.
+ cfg.setEvictionPolicy(null);
+
+ // No copying of values.
+ cfg.setCopyOnRead(false);
+
+ // Cache is partitioned.
+ cfg.setCacheMode(CacheMode.PARTITIONED);
+
+ cfg.setBackups(0);
+
+ cfg.setName("TMP_BI_INDEXED_CACHE");
+
+ return ignite.getOrCreateCache(cfg);
+ }
+
+ /**
+ * Loads vectors into cache.
+ *
+ * @param cacheName Name of cache.
+ * @param vectorsIterator Iterator over vectors to load.
+ * @param vectorSize Size of vector.
+ * @param ignite Ignite instance.
+ */
+ private static void loadVectorsIntoBiIndexedCache(String cacheName, Iterator<? extends Vector> vectorsIterator,
+ int vectorSize, Ignite ignite) {
+ try (IgniteDataStreamer<BiIndex, Double> streamer =
+ ignite.dataStreamer(cacheName)) {
+ int sampleIdx = 0;
+
+ streamer.perNodeBufferSize(10000);
+
+ while (vectorsIterator.hasNext()) {
+ org.apache.ignite.ml.math.Vector next = vectorsIterator.next();
+
+ for (int i = 0; i < vectorSize; i++)
+ streamer.addData(new BiIndex(sampleIdx, i), next.getX(i));
+
+ sampleIdx++;
+
+ if (sampleIdx % 1000 == 0)
+ System.out.println("Loaded " + sampleIdx + " vectors.");
+ }
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/package-info.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/package-info.java b/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/package-info.java
new file mode 100644
index 0000000..9b6867b
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/math/trees/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Decision trees examples.
+ */
+package org.apache.ignite.examples.ml.math.trees;
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/licenses/netlib-java-bsd3.txt
----------------------------------------------------------------------
diff --git a/modules/ml/licenses/netlib-java-bsd3.txt b/modules/ml/licenses/netlib-java-bsd3.txt
new file mode 100644
index 0000000..d6b30c1
--- /dev/null
+++ b/modules/ml/licenses/netlib-java-bsd3.txt
@@ -0,0 +1,51 @@
+This product binaries redistribute netlib-java which is available under the following license:
+
+Copyright (c) 2013 Samuel Halliday
+Copyright (c) 1992-2011 The University of Tennessee and The University
+ of Tennessee Research Foundation. All rights
+ reserved.
+Copyright (c) 2000-2011 The University of California Berkeley. All
+ rights reserved.
+Copyright (c) 2006-2011 The University of Colorado Denver. All rights
+ reserved.
+
+$COPYRIGHT$
+
+Additional copyrights may follow
+
+$HEADER$
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+- Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer listed
+ in this license in the documentation and/or other materials
+ provided with the distribution.
+
+- Neither the name of the copyright holders nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+The copyright holders provide no reassurances that the source code
+provided does not infringe any patent, copyright, or any other
+intellectual property rights of third parties. The copyright holders
+disclaim any liability to any recipient for claims brought against
+recipient by any third party for infringement of that parties
+intellectual property rights.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/pom.xml
----------------------------------------------------------------------
diff --git a/modules/ml/pom.xml b/modules/ml/pom.xml
index 94cfc51..c495f44 100644
--- a/modules/ml/pom.xml
+++ b/modules/ml/pom.xml
@@ -75,13 +75,6 @@
<dependency>
<groupId>org.springframework</groupId>
- <artifactId>spring-beans</artifactId>
- <version>${spring.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
@@ -105,6 +98,11 @@
<version>1.0</version>
</dependency>
+ <dependency>
+ <groupId>com.zaxxer</groupId>
+ <artifactId>SparseBitSet</artifactId>
+ <version>1.0</version>
+ </dependency>
</dependencies>
<profiles>
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/Model.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/Model.java b/modules/ml/src/main/java/org/apache/ignite/ml/Model.java
index 3c60bfa..05ce774 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/Model.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/Model.java
@@ -24,7 +24,7 @@ import java.util.function.BiFunction;
@FunctionalInterface
public interface Model<T, V> extends Serializable {
/** Predict a result for value. */
- public V predict(T val);
+ V predict(T val);
/**
* Combines this model with other model via specified combiner
@@ -33,7 +33,7 @@ public interface Model<T, V> extends Serializable {
* @param combiner Combiner.
* @return Combination of models.
*/
- public default <X, W> Model<T, X> combine(Model<T, W> other, BiFunction<V, W, X> combiner) {
+ default <X, W> Model<T, X> combine(Model<T, W> other, BiFunction<V, W, X> combiner) {
return v -> combiner.apply(predict(v), other.predict(v));
}
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/Trainer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/Trainer.java b/modules/ml/src/main/java/org/apache/ignite/ml/Trainer.java
new file mode 100644
index 0000000..795e218
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/Trainer.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml;
+
+import org.apache.ignite.ml.trees.trainers.columnbased.ColumnDecisionTreeTrainer;
+
+/**
+ * Interface for Trainers. Trainer is just a function which produces model from the data.
+ * See for example {@link ColumnDecisionTreeTrainer}.
+ * @param <M> Type of produced model.
+ * @param <T> Type of data needed for model producing.
+ */
+public interface Trainer<M extends Model, T> {
+ M train(T data);
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/clustering/KMeansDistributedClusterer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/clustering/KMeansDistributedClusterer.java b/modules/ml/src/main/java/org/apache/ignite/ml/clustering/KMeansDistributedClusterer.java
index d6a3fc3..6c25edc 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/clustering/KMeansDistributedClusterer.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/clustering/KMeansDistributedClusterer.java
@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
+import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.cache.Cache;
@@ -94,7 +95,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
boolean converged = false;
int iteration = 0;
int dim = pointsCp.viewRow(0).size();
- IgniteUuid uid = pointsCp.getUUID();
+ UUID uid = pointsCp.getUUID();
// Execute iterations of Lloyd's algorithm until converged
while (iteration < maxIterations && !converged) {
@@ -140,7 +141,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
// to their squared distance from the centers. Note that only distances between points
// and new centers are computed in each iteration.
int step = 0;
- IgniteUuid uid = points.getUUID();
+ UUID uid = points.getUUID();
while (step < initSteps) {
// We assume here that costs can fit into memory of one node.
@@ -180,7 +181,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
}
/** */
- private List<Vector> getNewCenters(int k, ConcurrentHashMap<Integer, Double> costs, IgniteUuid uid,
+ private List<Vector> getNewCenters(int k, ConcurrentHashMap<Integer, Double> costs, UUID uid,
double sumCosts, String cacheName) {
return distributedFold(cacheName,
(IgniteBiFunction<Cache.Entry<SparseMatrixKey, Map<Integer, Double>>,
@@ -200,7 +201,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
list1.addAll(list2);
return list1;
},
- new ArrayList<>()
+ ArrayList::new
);
}
@@ -219,11 +220,11 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
(map1, map2) -> {
map1.putAll(map2);
return map1;
- }, new ConcurrentHashMap<>());
+ }, ConcurrentHashMap::new);
}
/** */
- private ConcurrentHashMap<Integer, Integer> weightCenters(IgniteUuid uid, List<Vector> distinctCenters, String cacheName) {
+ private ConcurrentHashMap<Integer, Integer> weightCenters(UUID uid, List<Vector> distinctCenters, String cacheName) {
return distributedFold(cacheName,
(IgniteBiFunction<Cache.Entry<SparseMatrixKey, Map<Integer, Double>>,
ConcurrentHashMap<Integer, Integer>,
@@ -249,7 +250,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
key -> key.matrixId().equals(uid),
(map1, map2) -> MapUtil.mergeMaps(map1, map2, (integer, integer2) -> integer2 + integer,
ConcurrentHashMap::new),
- new ConcurrentHashMap<>());
+ ConcurrentHashMap::new);
}
/** */
@@ -258,7 +259,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
}
/** */
- private SumsAndCounts getSumsAndCounts(Vector[] centers, int dim, IgniteUuid uid, String cacheName) {
+ private SumsAndCounts getSumsAndCounts(Vector[] centers, int dim, UUID uid, String cacheName) {
return CacheUtils.distributedFold(cacheName,
(IgniteBiFunction<Cache.Entry<SparseMatrixKey, Map<Integer, Double>>, SumsAndCounts, SumsAndCounts>)(entry, counts) -> {
Map<Integer, Double> vec = entry.getValue();
@@ -278,7 +279,7 @@ public class KMeansDistributedClusterer extends BaseKMeansClusterer<SparseDistri
return counts;
},
key -> key.matrixId().equals(uid),
- SumsAndCounts::merge, new SumsAndCounts()
+ SumsAndCounts::merge, SumsAndCounts::new
);
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/estimators/Estimators.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/estimators/Estimators.java b/modules/ml/src/main/java/org/apache/ignite/ml/estimators/Estimators.java
new file mode 100644
index 0000000..13331d1
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/estimators/Estimators.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.estimators;
+
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+import java.util.stream.Stream;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.Model;
+import org.apache.ignite.ml.math.functions.IgniteTriFunction;
+
+/** Estimators. */
+public class Estimators {
+ /** Simple implementation of mean squared error estimator. */
+ public static <T, V> IgniteTriFunction<Model<T, V>, Stream<IgniteBiTuple<T, V>>, Function<V, Double>, Double> MSE() {
+ return (model, stream, f) -> stream.mapToDouble(dp -> {
+ double diff = f.apply(dp.get2()) - f.apply(model.predict(dp.get1()));
+ return diff * diff;
+ }).average().orElse(0);
+ }
+
+ /** Simple implementation of errors percentage estimator. */
+ public static <T, V> IgniteTriFunction<Model<T, V>, Stream<IgniteBiTuple<T, V>>, Function<V, Double>, Double> errorsPercentage() {
+ return (model, stream, f) -> {
+ AtomicLong total = new AtomicLong(0);
+
+ long cnt = stream.
+ peek((ib) -> total.incrementAndGet()).
+ filter(dp -> !model.predict(dp.get1()).equals(dp.get2())).
+ count();
+
+ return (double)cnt / total.get();
+ };
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/estimators/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/estimators/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/estimators/package-info.java
new file mode 100644
index 0000000..c03827f
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/estimators/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains model quality estimators (e.g. mean squared error and
+ * errors-percentage) used to evaluate trained models.
+ */
+package org.apache.ignite.ml.estimators;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
index 8c8bba7..b9eb386 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
@@ -21,7 +21,11 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BinaryOperator;
+import java.util.stream.Stream;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
@@ -32,17 +36,21 @@ import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.processors.cache.CacheEntryImpl;
import org.apache.ignite.internal.util.typedef.internal.A;
+import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteRunnable;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.KeyMapper;
-import org.apache.ignite.ml.math.distributed.keys.RowColMatrixKey;
-import org.apache.ignite.ml.math.distributed.keys.impl.BlockMatrixKey;
+import org.apache.ignite.ml.math.distributed.keys.BlockMatrixKey;
+import org.apache.ignite.ml.math.distributed.keys.MatrixCacheKey;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
+import org.apache.ignite.ml.math.functions.IgniteBinaryOperator;
import org.apache.ignite.ml.math.functions.IgniteConsumer;
import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.math.functions.IgniteTriFunction;
import org.apache.ignite.ml.math.impls.matrix.BlockEntry;
/**
@@ -131,7 +139,7 @@ public class CacheUtils {
* @return Sum obtained using sparse logic.
*/
@SuppressWarnings("unchecked")
- public static <K, V> double sparseSum(IgniteUuid matrixUuid, String cacheName) {
+ public static <K, V> double sparseSum(UUID matrixUuid, String cacheName) {
A.notNull(matrixUuid, "matrixUuid");
A.notNull(cacheName, "cacheName");
@@ -198,7 +206,7 @@ public class CacheUtils {
* @return Minimum value obtained using sparse logic.
*/
@SuppressWarnings("unchecked")
- public static <K, V> double sparseMin(IgniteUuid matrixUuid, String cacheName) {
+ public static <K, V> double sparseMin(UUID matrixUuid, String cacheName) {
A.notNull(matrixUuid, "matrixUuid");
A.notNull(cacheName, "cacheName");
@@ -235,7 +243,7 @@ public class CacheUtils {
* @return Maximum value obtained using sparse logic.
*/
@SuppressWarnings("unchecked")
- public static <K, V> double sparseMax(IgniteUuid matrixUuid, String cacheName) {
+ public static <K, V> double sparseMax(UUID matrixUuid, String cacheName) {
A.notNull(matrixUuid, "matrixUuid");
A.notNull(cacheName, "cacheName");
@@ -316,7 +324,7 @@ public class CacheUtils {
* @param mapper Mapping {@link IgniteFunction}.
*/
@SuppressWarnings("unchecked")
- public static <K, V> void sparseMap(IgniteUuid matrixUuid, IgniteDoubleFunction<Double> mapper, String cacheName) {
+ public static <K, V> void sparseMap(UUID matrixUuid, IgniteDoubleFunction<Double> mapper, String cacheName) {
A.notNull(matrixUuid, "matrixUuid");
A.notNull(cacheName, "cacheName");
A.notNull(mapper, "mapper");
@@ -350,12 +358,12 @@ public class CacheUtils {
*
* @param matrixUuid Matrix uuid.
*/
- private static <K> IgnitePredicate<K> sparseKeyFilter(IgniteUuid matrixUuid) {
+ private static <K> IgnitePredicate<K> sparseKeyFilter(UUID matrixUuid) {
return key -> {
- if (key instanceof BlockMatrixKey)
- return ((BlockMatrixKey)key).matrixId().equals(matrixUuid);
- else if (key instanceof RowColMatrixKey)
- return ((RowColMatrixKey)key).matrixId().equals(matrixUuid);
+ if (key instanceof MatrixCacheKey)
+ return ((MatrixCacheKey)key).matrixId().equals(matrixUuid);
+ else if (key instanceof IgniteBiTuple)
+ return ((IgniteBiTuple<Integer, UUID>)key).get2().equals(matrixUuid);
else
throw new UnsupportedOperationException();
};
@@ -404,6 +412,76 @@ public class CacheUtils {
}
/**
+ * @param cacheName Cache name.
+ * @param fun An operation that accepts a cache entry and processes it.
+ * @param ignite Ignite.
+ * @param keysGen Keys generator.
+ * @param <K> Cache key object type.
+ * @param <V> Cache value object type.
+ */
+ public static <K, V> void update(String cacheName, Ignite ignite,
+ IgniteBiFunction<Ignite, Cache.Entry<K, V>, Stream<Cache.Entry<K, V>>> fun, IgniteSupplier<Set<K>> keysGen) {
+ bcast(cacheName, ignite, () -> {
+ Ignite ig = Ignition.localIgnite();
+ IgniteCache<K, V> cache = ig.getOrCreateCache(cacheName);
+
+ Affinity<K> affinity = ig.affinity(cacheName);
+ ClusterNode locNode = ig.cluster().localNode();
+
+ Collection<K> ks = affinity.mapKeysToNodes(keysGen.get()).get(locNode);
+
+ if (ks == null)
+ return;
+
+ Map<K, V> m = new ConcurrentHashMap<>();
+
+ ks.parallelStream().forEach(k -> {
+ V v = cache.localPeek(k);
+ if (v != null)
+ (fun.apply(ignite, new CacheEntryImpl<>(k, v))).forEach(ent -> m.put(ent.getKey(), ent.getValue()));
+ });
+
+ cache.putAll(m);
+ });
+ }
+
+ /**
+ * @param cacheName Cache name.
+ * @param fun An operation that accepts a cache entry and processes it.
+ * @param ignite Ignite.
+ * @param keysGen Keys generator.
+ * @param <K> Cache key object type.
+ * @param <V> Cache value object type.
+ */
+ public static <K, V> void update(String cacheName, Ignite ignite, IgniteConsumer<Cache.Entry<K, V>> fun,
+ IgniteSupplier<Set<K>> keysGen) {
+ bcast(cacheName, ignite, () -> {
+ Ignite ig = Ignition.localIgnite();
+ IgniteCache<K, V> cache = ig.getOrCreateCache(cacheName);
+
+ Affinity<K> affinity = ig.affinity(cacheName);
+ ClusterNode locNode = ig.cluster().localNode();
+
+ Collection<K> ks = affinity.mapKeysToNodes(keysGen.get()).get(locNode);
+
+ if (ks == null)
+ return;
+
+ Map<K, V> m = new ConcurrentHashMap<>();
+
+ for (K k : ks) {
+ V v = cache.localPeek(k);
+
+ if (v != null) { // Skip keys absent locally: putAll rejects null values.
+ fun.accept(new CacheEntryImpl<>(k, v));
+ m.put(k, v);
+ }
+ }
+ cache.putAll(m);
+ });
+ }
+
+ /**
* <b>Currently fold supports only commutative operations.<b/>
*
* @param cacheName Cache name.
@@ -463,11 +541,11 @@ public class CacheUtils {
* @param folder Folder.
* @param keyFilter Key filter.
* @param accumulator Accumulator.
- * @param zeroVal Zero value.
+ * @param zeroValSupp Zero value supplier.
*/
public static <K, V, A> A distributedFold(String cacheName, IgniteBiFunction<Cache.Entry<K, V>, A, A> folder,
- IgnitePredicate<K> keyFilter, BinaryOperator<A> accumulator, A zeroVal) {
- return sparseFold(cacheName, folder, keyFilter, accumulator, zeroVal, null, null, 0,
+ IgnitePredicate<K> keyFilter, BinaryOperator<A> accumulator, IgniteSupplier<A> zeroValSupp) {
+ return sparseFold(cacheName, folder, keyFilter, accumulator, zeroValSupp, null, null, 0,
false);
}
@@ -478,17 +556,17 @@ public class CacheUtils {
* @param folder Folder.
* @param keyFilter Key filter.
* @param accumulator Accumulator.
- * @param zeroVal Zero value.
- * @param defVal Def value.
- * @param defKey Def key.
+ * @param zeroValSupp Zero value supplier.
+ * @param defVal Default value.
+ * @param defKey Default key.
* @param defValCnt Def value count.
* @param isNilpotent Is nilpotent.
*/
private static <K, V, A> A sparseFold(String cacheName, IgniteBiFunction<Cache.Entry<K, V>, A, A> folder,
- IgnitePredicate<K> keyFilter, BinaryOperator<A> accumulator, A zeroVal, V defVal, K defKey, long defValCnt,
- boolean isNilpotent) {
+ IgnitePredicate<K> keyFilter, BinaryOperator<A> accumulator, IgniteSupplier<A> zeroValSupp, V defVal, K defKey,
+ long defValCnt, boolean isNilpotent) {
- A defRes = zeroVal;
+ A defRes = zeroValSupp.get();
if (!isNilpotent)
for (int i = 0; i < defValCnt; i++)
@@ -504,7 +582,7 @@ public class CacheUtils {
Affinity affinity = ignite.affinity(cacheName);
ClusterNode locNode = ignite.cluster().localNode();
- A a = zeroVal;
+ A a = zeroValSupp.get();
// Iterate over all partitions. Some of them will be stored on that local node.
for (int part = 0; part < partsCnt; part++) {
@@ -519,16 +597,54 @@ public class CacheUtils {
return a;
});
- totalRes.add(defRes);
- return totalRes.stream().reduce(zeroVal, accumulator);
+ return totalRes.stream().reduce(defRes, accumulator);
+ }
+
+ public static <K, V, A, W> A reduce(String cacheName, Ignite ignite,
+ IgniteTriFunction<W, Cache.Entry<K, V>, A, A> acc,
+ IgniteSupplier<W> supp,
+ IgniteSupplier<Iterable<Cache.Entry<K, V>>> entriesGen, IgniteBinaryOperator<A> comb,
+ IgniteSupplier<A> zeroValSupp) {
+
+ A defRes = zeroValSupp.get();
+
+ Collection<A> totalRes = bcast(cacheName, ignite, () -> {
+ // Use affinity in filter for ScanQuery. Otherwise we accept consumer in each node which is wrong.
+ A a = zeroValSupp.get();
+
+ W w = supp.get();
+
+ for (Cache.Entry<K, V> kvEntry : entriesGen.get())
+ a = acc.apply(w, kvEntry, a);
+
+ return a;
+ });
+
+ return totalRes.stream().reduce(defRes, comb);
+ }
+
+ public static <K, V, A, W> A reduce(String cacheName, IgniteTriFunction<W, Cache.Entry<K, V>, A, A> acc,
+ IgniteSupplier<W> supp,
+ IgniteSupplier<Iterable<Cache.Entry<K, V>>> entriesGen, IgniteBinaryOperator<A> comb,
+ IgniteSupplier<A> zeroValSupp) {
+ return reduce(cacheName, Ignition.localIgnite(), acc, supp, entriesGen, comb, zeroValSupp);
}
/**
* @param cacheName Cache name.
* @param run {@link Runnable} to broadcast to cache nodes for given cache name.
*/
+ public static void bcast(String cacheName, Ignite ignite, IgniteRunnable run) {
+ ignite.compute(ignite.cluster().forDataNodes(cacheName)).broadcast(run);
+ }
+
+ /**
+ * Broadcast runnable to data nodes of given cache.
+ * @param cacheName Cache name.
+ * @param run Runnable.
+ */
public static void bcast(String cacheName, IgniteRunnable run) {
- ignite().compute(ignite().cluster().forCacheNodes(cacheName)).broadcast(run);
+ bcast(cacheName, ignite(), run);
}
/**
@@ -537,6 +653,18 @@ public class CacheUtils {
* @param <A> Type returned by the callable.
*/
public static <A> Collection<A> bcast(String cacheName, IgniteCallable<A> call) {
- return ignite().compute(ignite().cluster().forCacheNodes(cacheName)).broadcast(call);
+ return bcast(cacheName, ignite(), call);
+ }
+
+ /**
+ * Broadcast callable to data nodes of given cache.
+ * @param cacheName Cache name.
+ * @param ignite Ignite instance.
+ * @param call Callable to broadcast.
+ * @param <A> Type of callable result.
+ * @return Results of callable from each node.
+ */
+ public static <A> Collection<A> bcast(String cacheName, Ignite ignite, IgniteCallable<A> call) {
+ return ignite.compute(ignite.cluster().forDataNodes(cacheName)).broadcast(call);
}
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/MatrixCacheKey.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/MatrixCacheKey.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/MatrixCacheKey.java
index 669e9a4..0242560 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/MatrixCacheKey.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/MatrixCacheKey.java
@@ -17,7 +17,7 @@
package org.apache.ignite.ml.math.distributed.keys;
-import org.apache.ignite.lang.IgniteUuid;
+import java.util.UUID;
/**
* Base matrix cache key.
@@ -26,10 +26,10 @@ public interface MatrixCacheKey {
/**
* @return matrix id.
*/
- public IgniteUuid matrixId();
+ public UUID matrixId();
/**
* @return affinity key.
*/
- public IgniteUuid affinityKey();
+ public Object affinityKey();
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/BlockMatrixKey.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/BlockMatrixKey.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/BlockMatrixKey.java
index 2edd9cb..cc8c488 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/BlockMatrixKey.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/BlockMatrixKey.java
@@ -21,6 +21,7 @@ import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
+import java.util.UUID;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.BinaryRawReader;
import org.apache.ignite.binary.BinaryRawWriter;
@@ -47,7 +48,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
/** Block col ID */
private long blockIdCol;
/** Matrix ID */
- private IgniteUuid matrixUuid;
+ private UUID matrixUuid;
/** Block affinity key. */
private IgniteUuid affinityKey;
@@ -64,7 +65,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
* @param matrixUuid Matrix uuid.
* @param affinityKey Affinity key.
*/
- public BlockMatrixKey(long rowId, long colId, IgniteUuid matrixUuid, @Nullable IgniteUuid affinityKey) {
+ public BlockMatrixKey(long rowId, long colId, UUID matrixUuid, @Nullable IgniteUuid affinityKey) {
assert rowId >= 0;
assert colId >= 0;
assert matrixUuid != null;
@@ -86,7 +87,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
}
/** {@inheritDoc} */
- @Override public IgniteUuid matrixId() {
+ @Override public UUID matrixId() {
return matrixUuid;
}
@@ -97,7 +98,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
- U.writeGridUuid(out, matrixUuid);
+ out.writeObject(matrixUuid);
U.writeGridUuid(out, affinityKey);
out.writeLong(blockIdRow);
out.writeLong(blockIdCol);
@@ -105,7 +106,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
- matrixUuid = U.readGridUuid(in);
+ matrixUuid = (UUID)in.readObject();
affinityKey = U.readGridUuid(in);
blockIdRow = in.readLong();
blockIdCol = in.readLong();
@@ -115,7 +116,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
@Override public void writeBinary(BinaryWriter writer) throws BinaryObjectException {
BinaryRawWriter out = writer.rawWriter();
- BinaryUtils.writeIgniteUuid(out, matrixUuid);
+ out.writeUuid(matrixUuid);
BinaryUtils.writeIgniteUuid(out, affinityKey);
out.writeLong(blockIdRow);
out.writeLong(blockIdCol);
@@ -125,7 +126,7 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
@Override public void readBinary(BinaryReader reader) throws BinaryObjectException {
BinaryRawReader in = reader.rawReader();
- matrixUuid = BinaryUtils.readIgniteUuid(in);
+ matrixUuid = in.readUuid();
affinityKey = BinaryUtils.readIgniteUuid(in);
blockIdRow = in.readLong();
blockIdCol = in.readLong();
@@ -160,6 +161,4 @@ public class BlockMatrixKey implements org.apache.ignite.ml.math.distributed.key
@Override public String toString() {
return S.toString(BlockMatrixKey.class, this);
}
-
-
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/SparseMatrixKey.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/SparseMatrixKey.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/SparseMatrixKey.java
index 0c34c8b..aa5e0ad 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/SparseMatrixKey.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/keys/impl/SparseMatrixKey.java
@@ -21,30 +21,24 @@ import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
-import org.apache.ignite.binary.BinaryObjectException;
-import org.apache.ignite.binary.BinaryRawReader;
-import org.apache.ignite.binary.BinaryRawWriter;
-import org.apache.ignite.binary.BinaryReader;
-import org.apache.ignite.binary.BinaryWriter;
-import org.apache.ignite.binary.Binarylizable;
-import org.apache.ignite.internal.binary.BinaryUtils;
+import java.util.UUID;
+import org.apache.ignite.cache.affinity.AffinityKeyMapped;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
-import org.apache.ignite.internal.util.typedef.internal.U;
-import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.distributed.keys.RowColMatrixKey;
import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
/**
* Key implementation for {@link SparseDistributedMatrix}.
*/
-public class SparseMatrixKey implements RowColMatrixKey, Externalizable, Binarylizable {
+public class SparseMatrixKey implements RowColMatrixKey, Externalizable {
/** */
private int idx;
/** */
- private IgniteUuid matrixId;
+ private UUID matrixId;
/** */
- private IgniteUuid affinityKey;
+ @AffinityKeyMapped
+ private Object affinityKey;
/**
* Default constructor (required by Externalizable).
@@ -56,7 +50,7 @@ public class SparseMatrixKey implements RowColMatrixKey, Externalizable, Binaryl
/**
* Build Key.
*/
- public SparseMatrixKey(int idx, IgniteUuid matrixId, IgniteUuid affinityKey) {
+ public SparseMatrixKey(int idx, UUID matrixId, Object affinityKey) {
assert idx >= 0 : "Index must be positive.";
assert matrixId != null : "Matrix id can`t be null.";
@@ -71,54 +65,35 @@ public class SparseMatrixKey implements RowColMatrixKey, Externalizable, Binaryl
}
/** {@inheritDoc} */
- @Override public IgniteUuid matrixId() {
+ @Override public UUID matrixId() {
return matrixId;
}
/** {@inheritDoc} */
- @Override public IgniteUuid affinityKey() {
+ @Override public Object affinityKey() {
return affinityKey;
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
- U.writeGridUuid(out, matrixId);
- U.writeGridUuid(out, affinityKey);
+// U.writeGridUuid(out, matrixId);
+ out.writeObject(matrixId);
+ out.writeObject(affinityKey);
out.writeInt(idx);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
- matrixId = U.readGridUuid(in);
- affinityKey = U.readGridUuid(in);
- idx = in.readInt();
- }
-
- /** {@inheritDoc} */
- @Override public void writeBinary(BinaryWriter writer) throws BinaryObjectException {
- BinaryRawWriter out = writer.rawWriter();
-
- BinaryUtils.writeIgniteUuid(out, matrixId);
- BinaryUtils.writeIgniteUuid(out, affinityKey);
- out.writeInt(idx);
- }
-
- /** {@inheritDoc} */
- @Override public void readBinary(BinaryReader reader) throws BinaryObjectException {
- BinaryRawReader in = reader.rawReader();
-
- matrixId = BinaryUtils.readIgniteUuid(in);
- affinityKey = BinaryUtils.readIgniteUuid(in);
+ matrixId = (UUID)in.readObject();
+ affinityKey = in.readObject();
idx = in.readInt();
}
/** {@inheritDoc} */
@Override public int hashCode() {
- int res = 1;
-
- res += res * 37 + matrixId.hashCode();
- res += res * 37 + idx;
-
+ int res = idx;
+ res = 31 * res + (matrixId != null ? matrixId.hashCode() : 0);
+ res = 31 * res + (affinityKey != null ? affinityKey.hashCode() : 0);
return res;
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
index 022dd04..0b4ad12 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
@@ -17,7 +17,9 @@
package org.apache.ignite.ml.math.functions;
+import java.util.Comparator;
import java.util.List;
+import java.util.function.BiFunction;
import org.apache.ignite.lang.IgniteBiTuple;
/**
@@ -75,6 +77,30 @@ public final class Functions {
/** Function that returns {@code max(abs(a), abs(b))}. */
public static final IgniteBiFunction<Double, Double, Double> MAX_ABS = (a, b) -> Math.max(Math.abs(a), Math.abs(b));
+ /**
+ * Generic 'max' function.
+ * @param a First object to compare.
+ * @param b Second object to compare.
+ * @param f Comparator.
+ * @param <T> Type of objects to compare.
+ * @return Maximum between {@code a} and {@code b} in terms of comparator {@code f}.
+ */
+ public static <T> T MAX_GENERIC(T a, T b, Comparator<T> f) {
+ return f.compare(a, b) > 0 ? a : b;
+ }
+
+ /**
+ * Generic 'min' function.
+ * @param a First object to compare.
+ * @param b Second object to compare.
+ * @param f Comparator.
+ * @param <T> Type of objects to compare.
+ * @return Minimum between {@code a} and {@code b} in terms of comparator {@code f}.
+ */
+ public static <T> T MIN_GENERIC(T a, T b, Comparator<T> f) {
+ return f.compare(a, b) < 0 ? a : b;
+ }
+
/** Function that returns {@code min(abs(a), abs(b))}. */
public static final IgniteBiFunction<Double, Double, Double> MIN_ABS = (a, b) -> Math.min(Math.abs(a), Math.abs(b));
@@ -185,4 +211,16 @@ public final class Functions {
return Math.pow(a, b);
};
}
+
+ /**
+ * Curry bifunction.
+ * @param f Bifunction to curry.
+ * @param <A> Type of first argument of {@code f}.
+ * @param <B> Type of second argument of {@code f}.
+ * @param <C> Return type of {@code f}.
+ * @return Curried bifunction.
+ */
+ public static <A, B, C> IgniteCurriedBiFunction<A, B, C> curry(BiFunction<A, B, C> f) {
+ return a -> b -> f.apply(a, b);
+ }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteBinaryOperator.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteBinaryOperator.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteBinaryOperator.java
new file mode 100644
index 0000000..1170b67
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteBinaryOperator.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math.functions;
+
+import java.io.Serializable;
+import java.util.function.BinaryOperator;
+
+/**
+ * Serializable binary operator.
+ *
+ * @see java.util.function.BinaryOperator
+ */
+public interface IgniteBinaryOperator<A> extends BinaryOperator<A>, Serializable {
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedBiFunction.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedBiFunction.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedBiFunction.java
new file mode 100644
index 0000000..3dd8490
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedBiFunction.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math.functions;
+
+import java.io.Serializable;
+import java.util.function.BiFunction;
+
+/**
+ * Serializable curried bifunction: {@code A -> (B -> T)}.
+ *
+ * @see BiFunction
+ */
+public interface IgniteCurriedBiFunction<A, B, T> extends IgniteFunction<A, IgniteFunction<B, T>>, Serializable {
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteSupplier.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteSupplier.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteSupplier.java
new file mode 100644
index 0000000..8c05b75
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteSupplier.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math.functions;
+
+import java.io.Serializable;
+import java.util.function.Supplier;
+
+/**
+ * Serializable supplier.
+ *
+ * @see java.util.function.Supplier
+ */
+@FunctionalInterface
+public interface IgniteSupplier<T> extends Supplier<T>, Serializable {
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteToDoubleFunction.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteToDoubleFunction.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteToDoubleFunction.java
new file mode 100644
index 0000000..59a8bf3
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteToDoubleFunction.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math.functions;
+
+import java.io.Serializable;
+import java.util.function.ToDoubleFunction;
+
+@FunctionalInterface
+public interface IgniteToDoubleFunction<T> extends ToDoubleFunction<T>, Serializable {
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
index 3d542bc..e829168 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseBlockDistributedMatrix.java
@@ -20,13 +20,13 @@ package org.apache.ignite.ml.math.impls.matrix;
import java.util.Collection;
import java.util.List;
import java.util.Map;
+import java.util.UUID;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.util.lang.IgnitePair;
-import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.Matrix;
import org.apache.ignite.ml.math.StorageConstants;
import org.apache.ignite.ml.math.Vector;
@@ -190,7 +190,7 @@ public class SparseBlockDistributedMatrix extends AbstractMatrix implements Stor
}
/** */
- private IgniteUuid getUUID() {
+ private UUID getUUID() {
return ((BlockMatrixStorage)getStorage()).getUUID();
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedMatrix.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedMatrix.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedMatrix.java
index 9a18f8b..594aebc 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedMatrix.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/matrix/SparseDistributedMatrix.java
@@ -19,6 +19,7 @@ package org.apache.ignite.ml.math.impls.matrix;
import java.util.Collection;
import java.util.Map;
+import java.util.UUID;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
@@ -211,7 +212,7 @@ public class SparseDistributedMatrix extends AbstractMatrix implements StorageCo
}
/** */
- public IgniteUuid getUUID() {
+ public UUID getUUID() {
return ((SparseDistributedMatrixStorage)getStorage()).getUUID();
}
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/BlockMatrixStorage.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/BlockMatrixStorage.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/BlockMatrixStorage.java
index 0d5cf0a..cd76e5a 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/BlockMatrixStorage.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/BlockMatrixStorage.java
@@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
+import java.util.UUID;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicityMode;
@@ -32,7 +33,6 @@ import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.util.lang.IgnitePair;
-import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.MatrixStorage;
import org.apache.ignite.ml.math.StorageConstants;
@@ -59,7 +59,7 @@ public class BlockMatrixStorage extends CacheUtils implements MatrixStorage, Sto
/** Amount of columns in the matrix. */
private int cols;
/** Matrix uuid. */
- private IgniteUuid uuid;
+ private UUID uuid;
/** Block size about 8 KB of data. */
private int maxBlockEdge = MAX_BLOCK_SIZE;
@@ -92,7 +92,7 @@ public class BlockMatrixStorage extends CacheUtils implements MatrixStorage, Sto
cache = newCache();
- uuid = IgniteUuid.randomUuid();
+ uuid = UUID.randomUUID();
}
/**
@@ -152,7 +152,7 @@ public class BlockMatrixStorage extends CacheUtils implements MatrixStorage, Sto
out.writeInt(cols);
out.writeInt(blocksInRow);
out.writeInt(blocksInCol);
- U.writeGridUuid(out, uuid);
+ out.writeObject(uuid);
out.writeUTF(cache.getName());
}
@@ -162,7 +162,7 @@ public class BlockMatrixStorage extends CacheUtils implements MatrixStorage, Sto
cols = in.readInt();
blocksInRow = in.readInt();
blocksInCol = in.readInt();
- uuid = U.readGridUuid(in);
+ uuid = (UUID)in.readObject();
cache = ignite().getOrCreateCache(in.readUTF());
}
@@ -201,7 +201,7 @@ public class BlockMatrixStorage extends CacheUtils implements MatrixStorage, Sto
*
* @return storage UUID.
*/
- public IgniteUuid getUUID() {
+ public UUID getUUID() {
return uuid;
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/SparseDistributedMatrixStorage.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/SparseDistributedMatrixStorage.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/SparseDistributedMatrixStorage.java
index 95852b7..c40e73d 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/SparseDistributedMatrixStorage.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/SparseDistributedMatrixStorage.java
@@ -24,6 +24,7 @@ import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Map;
import java.util.Set;
+import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.ignite.IgniteCache;
@@ -33,7 +34,6 @@ import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.MatrixStorage;
import org.apache.ignite.ml.math.StorageConstants;
import org.apache.ignite.ml.math.distributed.CacheUtils;
@@ -57,7 +57,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
/** Random or sequential access mode. */
private int acsMode;
/** Matrix uuid. */
- private IgniteUuid uuid;
+ private UUID uuid;
/** Actual distributed storage. */
private IgniteCache<
@@ -91,7 +91,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
cache = newCache();
- uuid = IgniteUuid.randomUuid();
+ uuid = UUID.randomUUID();
}
/**
@@ -115,6 +115,9 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
// Cache is partitioned.
cfg.setCacheMode(CacheMode.PARTITIONED);
+ // TODO: Possibly we should add a fix of https://issues.apache.org/jira/browse/IGNITE-6862 here commented below.
+ // cfg.setReadFromBackup(false);
+
// Random cache name.
cfg.setName(CACHE_NAME);
@@ -205,7 +208,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
/** Build cache key for row/column. */
public RowColMatrixKey getCacheKey(int idx) {
- return new SparseMatrixKey(idx, uuid, null);
+ return new SparseMatrixKey(idx, uuid, idx);
}
/** {@inheritDoc} */
@@ -239,7 +242,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
cols = in.readInt();
acsMode = in.readInt();
stoMode = in.readInt();
- uuid = (IgniteUuid)in.readObject();
+ uuid = (UUID)in.readObject();
cache = ignite().getOrCreateCache(in.readUTF());
}
@@ -304,7 +307,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
}
/** */
- public IgniteUuid getUUID() {
+ public UUID getUUID() {
return uuid;
}
@@ -312,7 +315,7 @@ public class SparseDistributedMatrixStorage extends CacheUtils implements Matrix
@Override public Set<RowColMatrixKey> getAllKeys() {
int range = stoMode == ROW_STORAGE_MODE ? rows : cols;
- return IntStream.range(0, range).mapToObj(i -> new SparseMatrixKey(i, getUUID(), null)).collect(Collectors.toSet());
+ return IntStream.range(0, range).mapToObj(i -> new SparseMatrixKey(i, getUUID(), i)).collect(Collectors.toSet());
}
/** {@inheritDoc} */
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVector.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVector.java b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVector.java
new file mode 100644
index 0000000..51b973a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVector.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.structures;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Class for vector with label.
+ *
+ * @param <V> Some class extending {@link Vector}.
+ * @param <T> Type of label.
+ */
+public class LabeledVector<V extends Vector, T> {
+ /** Vector. */
+ private final V vector;
+
+ /** Label. */
+ private final T lb;
+
+ /**
+ * Construct labeled vector.
+ *
+ * @param vector Vector.
+ * @param lb Label.
+ */
+ public LabeledVector(V vector, T lb) {
+ this.vector = vector;
+ this.lb = lb;
+ }
+
+ /**
+ * Get the vector.
+ *
+ * @return Vector.
+ */
+ public V vector() {
+ return vector;
+ }
+
+ /**
+ * Get the label.
+ *
+ * @return Label.
+ */
+ public T label() {
+ return lb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorDouble.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorDouble.java b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorDouble.java
new file mode 100644
index 0000000..4ef9eae
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledVectorDouble.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.structures;
+
+import org.apache.ignite.ml.math.Vector;
+
+/**
+ * Labeled vector specialized to double label.
+ *
+ * @param <V> Type of vector.
+ */
+public class LabeledVectorDouble<V extends Vector> extends LabeledVector<V, Double> {
+ /**
+ * Construct LabeledVectorDouble.
+ *
+ * @param vector Vector.
+ * @param lb Label.
+ */
+ public LabeledVectorDouble(V vector, Double lb) {
+ super(vector, lb);
+ }
+
+ /**
+ * Get label as double.
+ *
+ * @return label as double.
+ */
+ public double doubleLabel() {
+ return label();
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/structures/package-info.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/structures/package-info.java b/modules/ml/src/main/java/org/apache/ignite/ml/structures/package-info.java
new file mode 100644
index 0000000..ec9d79e
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains some utility structures.
+ */
+package org.apache.ignite.ml.structures;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalRegionInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalRegionInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalRegionInfo.java
new file mode 100644
index 0000000..3ae474e
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalRegionInfo.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.BitSet;
+
+/**
+ * Information about categorical region.
+ */
+public class CategoricalRegionInfo extends RegionInfo implements Externalizable {
+ /**
+ * Bitset representing categories of this region.
+ */
+ private BitSet cats;
+
+ /**
+ * @param impurity Impurity of region.
+ * @param cats Bitset representing categories of this region.
+ */
+ public CategoricalRegionInfo(double impurity, BitSet cats) {
+ super(impurity);
+
+ this.cats = cats;
+ }
+
+ /**
+ * No-op constructor for serialization/deserialization.
+ */
+ public CategoricalRegionInfo() {
+ // No-op
+ }
+
+ /**
+ * Get bitset representing categories of this region.
+ *
+ * @return Bitset representing categories of this region.
+ */
+ public BitSet cats() {
+ return cats;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeExternal(ObjectOutput out) throws IOException {
+ super.writeExternal(out);
+ out.writeObject(cats);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+ super.readExternal(in);
+ cats = (BitSet)in.readObject();
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/db7697b1/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalSplitInfo.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalSplitInfo.java b/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalSplitInfo.java
new file mode 100644
index 0000000..94cb1e8
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/trees/CategoricalSplitInfo.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.trees;
+
+import java.util.BitSet;
+import org.apache.ignite.ml.trees.nodes.CategoricalSplitNode;
+import org.apache.ignite.ml.trees.nodes.SplitNode;
+import org.apache.ignite.ml.trees.trainers.columnbased.vectors.SplitInfo;
+
+/**
+ * Information about split of categorical feature.
+ *
+ * @param <D> Class representing information of left and right subregions.
+ */
+public class CategoricalSplitInfo<D extends RegionInfo> extends SplitInfo<D> {
+ /** Bitset indicating which vectors are assigned to left subregion. */
+ private final BitSet bs;
+
+ /**
+ * @param regionIdx Index of region which is split.
+ * @param leftData Data of left subregion.
+ * @param rightData Data of right subregion.
+ * @param bs Bitset indicating which vectors are assigned to left subregion.
+ */
+ public CategoricalSplitInfo(int regionIdx, D leftData, D rightData,
+ BitSet bs) {
+ super(regionIdx, leftData, rightData);
+ this.bs = bs;
+ }
+
+ /** {@inheritDoc} */
+ @Override public SplitNode createSplitNode(int featureIdx) {
+ return new CategoricalSplitNode(featureIdx, bs);
+ }
+
+ /**
+ * Get bitset indicating which vectors are assigned to left subregion.
+ */
+ public BitSet bitSet() {
+ return bs;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String toString() {
+ return "CategoricalSplitInfo [" +
+ "infoGain=" + infoGain +
+ ", regionIdx=" + regionIdx +
+ ", leftData=" + leftData +
+ ", bs=" + bs +
+ ", rightData=" + rightData +
+ ']';
+ }
+}
[06/28] ignite git commit: IGNITE-6762: Fixed
SparseDistributedMatrixExample failed with NPE. This closes #3003
Posted by sb...@apache.org.
IGNITE-6762: Fixed SparseDistributedMatrixExample failed with NPE.
This closes #3003
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/c939bdba
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/c939bdba
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/c939bdba
Branch: refs/heads/ignite-zk
Commit: c939bdba3a159d1ccb080686796b50a03ac77c9f
Parents: 8195ba5
Author: Yury Babak <yb...@gridgain.com>
Authored: Thu Nov 9 16:45:49 2017 +0300
Committer: Igor Sapego <is...@gridgain.com>
Committed: Thu Nov 9 16:45:49 2017 +0300
----------------------------------------------------------------------
.../apache/ignite/ml/math/distributed/CacheUtils.java | 14 +++++---------
.../ignite/ml/math/impls/vector/CacheVectorTest.java | 10 ++--------
.../ml/math/impls/vector/VectorToMatrixTest.java | 3 ---
3 files changed, 7 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/c939bdba/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
index 9a73c5a..8c8bba7 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/distributed/CacheUtils.java
@@ -20,6 +20,7 @@ package org.apache.ignite.ml.math.distributed;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
+import java.util.Objects;
import java.util.function.BinaryOperator;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
@@ -73,7 +74,6 @@ public class CacheUtils {
/**
*
- *
*/
public Cache.Entry<K, V> entry() {
return entry;
@@ -81,7 +81,6 @@ public class CacheUtils {
/**
*
- *
*/
public IgniteCache<K, V> cache() {
return cache;
@@ -165,12 +164,8 @@ public class CacheUtils {
* @return Sum of the values.
*/
private static double sum(Collection<Double> c) {
- double sum = 0.0;
-
- for (double d : c)
- sum += d;
-
- return sum;
+ // Fix for IGNITE-6762, some collections could store null values.
+ return c.stream().filter(Objects::nonNull).mapToDouble(Double::doubleValue).sum();
}
/**
@@ -401,7 +396,8 @@ public class CacheUtils {
// Iterate over given partition.
// Query returns an empty cursor if this partition is not stored on this node.
- for (Cache.Entry<K, V> entry : cache.query(new ScanQuery<K, V>(part, (k, v) -> affinity.mapPartitionToNode(p) == locNode && (keyFilter == null || keyFilter.apply(k)))))
+ for (Cache.Entry<K, V> entry : cache.query(new ScanQuery<K, V>(part,
+ (k, v) -> affinity.mapPartitionToNode(p) == locNode && (keyFilter == null || keyFilter.apply(k)))))
fun.accept(new CacheEntry<>(entry, cache));
}
});
http://git-wip-us.apache.org/repos/asf/ignite/blob/c939bdba/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/CacheVectorTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/CacheVectorTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/CacheVectorTest.java
index a6cdd4c..1008cc2 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/CacheVectorTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/CacheVectorTest.java
@@ -143,19 +143,13 @@ public class CacheVectorTest extends GridCommonAbstractTest {
}
/** */
- public void testSumNegative() {
+ public void testSumEmptyVector() {
IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
IdentityValueMapper valMapper = new IdentityValueMapper();
CacheVector<Integer, Double> cacheVector = new CacheVector<>(size, getCache(), keyMapper, valMapper);
- try {
- double d = cacheVector.sum();
- fail();
- }
- catch (NullPointerException e) {
- // No-op.
- }
+ cacheVector.sum();
}
/** */
http://git-wip-us.apache.org/repos/asf/ignite/blob/c939bdba/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/VectorToMatrixTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/VectorToMatrixTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/VectorToMatrixTest.java
index 98230c3..a003dcf 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/VectorToMatrixTest.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/math/impls/vector/VectorToMatrixTest.java
@@ -188,9 +188,6 @@ public class VectorToMatrixTest {
/** */
private void assertCross(Vector v1, Vector v2, String desc) {
- if (true) // TODO: IGNITE-5777, wait BLAS integration.
- return;
-
assertNotNull(v1);
assertNotNull(v2);
[07/28] ignite git commit: ignite-6669 Do not call
CacheStoreSessionListener if store operation is not executed
Posted by sb...@apache.org.
ignite-6669 Do not call CacheStoreSessionListener if store operation is not executed
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/b8672d7d
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/b8672d7d
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/b8672d7d
Branch: refs/heads/ignite-zk
Commit: b8672d7d691981be3c10f74e97ae2caa5ddd1593
Parents: c939bdb
Author: Slava Koptilin <sl...@gmail.com>
Authored: Thu Nov 9 18:10:31 2017 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Thu Nov 9 18:10:31 2017 +0300
----------------------------------------------------------------------
.../store/GridCacheStoreManagerAdapter.java | 94 +++++-
...oreListenerRWThroughDisabledAtomicCache.java | 33 ++
...enerRWThroughDisabledTransactionalCache.java | 138 +++++++++
...SessionListenerReadWriteThroughDisabled.java | 291 ++++++++++++++++++
...eStoreSessionListenerWriteBehindEnabled.java | 304 +++++++++++++++++++
.../testsuites/IgniteCacheTestSuite4.java | 8 +-
.../Cache/Store/CacheStoreSessionTest.cs | 13 +-
7 files changed, 856 insertions(+), 25 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheStoreManagerAdapter.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheStoreManagerAdapter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheStoreManagerAdapter.java
index 9fe1f0c..22c2381 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheStoreManagerAdapter.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheStoreManagerAdapter.java
@@ -106,6 +106,9 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
private boolean writeThrough;
/** */
+ private boolean readThrough;
+
+ /** */
private Collection<CacheStoreSessionListener> sesLsnrs;
/** */
@@ -122,6 +125,8 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
writeThrough = cfg.isWriteThrough();
+ readThrough = cfg.isReadThrough();
+
this.cfgStore = cfgStore;
store = cacheStoreWrapper(ctx, cfgStore, cfg);
@@ -306,7 +311,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
log.debug(S.toString("Loading value from store for key",
"key", storeKey, true));
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.READ, false);
boolean threwEx = true;
@@ -442,7 +447,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
if (log.isDebugEnabled())
log.debug("Loading values from store for keys: " + keys0);
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.READ, false);
boolean threwEx = true;
@@ -501,7 +506,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
if (log.isDebugEnabled())
log.debug("Loading all values from store.");
- sessionInit0(null);
+ sessionInit0(null, StoreOperation.READ, false);
boolean threwEx = true;
@@ -567,7 +572,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
"val", val0, true));
}
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.WRITE, false);
boolean threwEx = true;
@@ -622,7 +627,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
if (log.isDebugEnabled())
log.debug("Storing values in cache store [entries=" + entries + ']');
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.WRITE, false);
boolean threwEx = true;
@@ -675,7 +680,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
if (log.isDebugEnabled())
log.debug(S.toString("Removing value from cache store", "key", key0, true));
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.WRITE, false);
boolean threwEx = true;
@@ -727,7 +732,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
log.debug(S.toString("Removing values from cache store",
"keys", keys0, true));
- sessionInit0(tx);
+ sessionInit0(tx, StoreOperation.WRITE, false);
boolean threwEx = true;
@@ -778,10 +783,10 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
boolean storeSessionEnded) throws IgniteCheckedException {
assert store != null;
- sessionInit0(tx);
+ sessionInit0(tx, commit? StoreOperation.COMMIT: StoreOperation.ROLLBACK, false);
try {
- if (sesLsnrs != null) {
+ if (sesLsnrs != null && sesHolder.get().contains(store)) {
for (CacheStoreSessionListener lsnr : sesLsnrs)
lsnr.onSessionEnd(locSes, commit);
}
@@ -820,7 +825,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
/** {@inheritDoc} */
@Override public void writeBehindSessionInit() throws IgniteCheckedException {
- sessionInit0(null);
+ sessionInit0(null, null, true);
}
/** {@inheritDoc} */
@@ -830,9 +835,12 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
/**
* @param tx Current transaction.
+ * @param op Store operation.
+ * @param writeBehindStoreInitiator {@code true} if method call is initiated by {@link GridCacheWriteBehindStore}.
* @throws IgniteCheckedException If failed.
*/
- private void sessionInit0(@Nullable IgniteInternalTx tx) throws IgniteCheckedException {
+ private void sessionInit0(@Nullable IgniteInternalTx tx, @Nullable StoreOperation op,
+ boolean writeBehindStoreInitiator) throws IgniteCheckedException {
assert sesHolder != null;
SessionData ses;
@@ -854,8 +862,45 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
sesHolder.set(ses);
+ notifyCacheStoreSessionListeners(ses, op, writeBehindStoreInitiator);
+ }
+
+ /**
+ * @param ses Current session.
+ * @param op Store operation.
+ * @param writeBehindStoreInitiator {@code True} if method call is initiated by {@link GridCacheWriteBehindStore}.
+ * @throws IgniteCheckedException If failed.
+ */
+ private void notifyCacheStoreSessionListeners(SessionData ses, @Nullable StoreOperation op,
+ boolean writeBehindStoreInitiator) throws IgniteCheckedException {
try {
- if (!ses.started(store) && sesLsnrs != null) {
+ boolean notifyLsnrs = false;
+
+ if (writeBehindStoreInitiator)
+ notifyLsnrs = !ses.started(store) && sesLsnrs != null;
+ else {
+ assert op != null;
+
+ switch (op) {
+ case READ:
+ notifyLsnrs = readThrough && !ses.started(store) && sesLsnrs != null;
+ break;
+
+ case WRITE:
+ notifyLsnrs = !cacheConfiguration().isWriteBehindEnabled() && writeThrough
+ && !ses.started(store) && sesLsnrs != null;
+ break;
+
+ case COMMIT:
+ case ROLLBACK:
+ // No needs to start the session (if not started yet) and notify listeners.
+ break;
+
+ default:
+ assert false : "Unexpected operation: " + op.toString();
+ }
+ }
+ if (notifyLsnrs) {
for (CacheStoreSessionListener lsnr : sesLsnrs)
lsnr.onSessionStart(locSes);
}
@@ -871,7 +916,7 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
private void sessionEnd0(@Nullable IgniteInternalTx tx, boolean threwEx) throws IgniteCheckedException {
try {
if (tx == null) {
- if (sesLsnrs != null) {
+ if (sesLsnrs != null && sesHolder.get().contains(store)) {
for (CacheStoreSessionListener lsnr : sesLsnrs)
lsnr.onSessionEnd(locSes, !threwEx);
}
@@ -995,6 +1040,14 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
return !started.remove(store);
}
+ /**
+ * @param store Cache store.
+ * @return {@code True} if session started.
+ */
+ private boolean contains(CacheStore store) {
+ return started.contains(store);
+ }
+
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(SessionData.class, this, "tx", CU.txString(tx != null ? tx.tx : null));
@@ -1429,4 +1482,19 @@ public abstract class GridCacheStoreManagerAdapter extends GridCacheManagerAdapt
throw new UnsupportedOperationException();
}
}
+
+ /** Enumeration that represents possible operations on the underlying store. */
+ private enum StoreOperation {
+ /** Read key-value pair from the underlying store. */
+ READ,
+
+ /** Update or remove key from the underlying store. */
+ WRITE,
+
+ /** Commit changes to the underlying store. */
+ COMMIT,
+
+ /** Rollback changes to the underlying store. */
+ ROLLBACK
+ }
}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
new file mode 100644
index 0000000..9b59940
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledAtomicCache.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import org.apache.ignite.cache.CacheAtomicityMode;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ */
+public class CacheStoreListenerRWThroughDisabledAtomicCache extends CacheStoreSessionListenerReadWriteThroughDisabled {
+ /** {@inheritDoc} */
+ @Override protected CacheAtomicityMode atomicityMode() {
+ return ATOMIC;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
new file mode 100644
index 0000000..6502c97
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreListenerRWThroughDisabledTransactionalCache.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.util.Random;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.transactions.Transaction;
+import org.apache.ignite.transactions.TransactionConcurrency;
+import org.apache.ignite.transactions.TransactionIsolation;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
+import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
+import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
+import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED;
+import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
+import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ */
+public class CacheStoreListenerRWThroughDisabledTransactionalCache extends CacheStoreSessionListenerReadWriteThroughDisabled {
+ /** {@inheritDoc} */
+ @Override protected CacheAtomicityMode atomicityMode() {
+ return TRANSACTIONAL;
+ }
+
+ /**
+ * Tests {@link IgniteCache#get(Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalLookup() {
+ testTransactionalLookup(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalLookup(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalLookup(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalLookup(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalLookup(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalLookup(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalLookup(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i)
+ cache.get(r.nextInt());
+
+ tx.commit();
+ }
+ }
+
+ /**
+ * Tests {@link IgniteCache#put(Object, Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalUpdate() {
+ testTransactionalUpdate(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalUpdate(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalUpdate(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalUpdate(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalUpdate(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalUpdate(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalUpdate(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i)
+ cache.put(r.nextInt(), "test-value");
+
+ tx.commit();
+ }
+ }
+
+ /**
+ * Tests {@link IgniteCache#remove(Object)} with disabled read-through and write-through modes.
+ */
+ public void testTransactionalRemove() {
+ testTransactionalRemove(OPTIMISTIC, READ_COMMITTED);
+ testTransactionalRemove(OPTIMISTIC, REPEATABLE_READ);
+ testTransactionalRemove(OPTIMISTIC, SERIALIZABLE);
+
+ testTransactionalRemove(PESSIMISTIC, READ_COMMITTED);
+ testTransactionalRemove(PESSIMISTIC, REPEATABLE_READ);
+ testTransactionalRemove(PESSIMISTIC, SERIALIZABLE);
+ }
+
+ /**
+ * @param concurrency Transaction concurrency level.
+ * @param isolation Transaction isolation level.
+ */
+ private void testTransactionalRemove(TransactionConcurrency concurrency, TransactionIsolation isolation) {
+ IgniteCache cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ cache.remove(key, "test-value");
+ }
+
+ tx.commit();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
new file mode 100644
index 0000000..1f6e97d
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerReadWriteThroughDisabled.java
@@ -0,0 +1,291 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.logging.Logger;
+import javax.cache.Cache;
+import javax.cache.configuration.Factory;
+import javax.cache.configuration.FactoryBuilder;
+import javax.cache.integration.CacheLoaderException;
+import javax.cache.integration.CacheWriterException;
+import javax.sql.DataSource;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.NearCacheConfiguration;
+import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
+
+/**
+ * This class tests that redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are not executed.
+ */
+public abstract class CacheStoreSessionListenerReadWriteThroughDisabled extends GridCacheAbstractSelfTest {
+ /** {@inheritDoc} */
+ protected int gridCount() {
+ return 2;
+ }
+
+ /** */
+ protected final int CNT = 100;
+
+ /** {@inheritDoc} */
+ protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
+ CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
+
+ cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
+
+ cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
+
+ cacheCfg.setReadThrough(false);
+ cacheCfg.setWriteThrough(false);
+
+ cacheCfg.setBackups(0);
+
+ return cacheCfg;
+ }
+
+ /** {@inheritDoc} */
+ protected NearCacheConfiguration nearConfiguration() {
+ return null;
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#get(Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testLookup() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i)
+ cache.get(r.nextInt());
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#getAll(Set)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchLookup() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Set<Object> values = new HashSet<>();
+
+ for (int i = 0; i < CNT; ++i)
+ values.add(r.nextInt());
+
+ cache.getAll(values);
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#put(Object, Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testUpdate() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i)
+ cache.put(r.nextInt(), "test-value");
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#putAll(Map)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchUpdate() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Map<Object, Object> values = new TreeMap<>();
+
+ for (int i = 0; i < CNT; ++i)
+ values.put(r.nextInt(), "test-value");
+
+ cache.putAll(values);
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#remove(Object)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testRemove() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ cache.remove(key);
+ }
+ }
+
+ /**
+ * Tests that there are no calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)} and
+ * {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)}
+ * while {@link IgniteCache#removeAll(Set)} performed.
+ *
+ * @throws Exception If failed.
+ */
+ public void testBatchRemove() throws Exception {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ Random r = new Random();
+
+ Set<Object> values = new HashSet<>();
+
+ for (int i = 0; i < CNT; ++i) {
+ int key = r.nextInt();
+
+ cache.put(key, "test-value");
+
+ values.add(key);
+ }
+
+ cache.removeAll(values);
+ }
+
+ /**
+ * Cache store session factory.
+ */
+ public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
+ /** {@inheritDoc} */
+ @Override public TestCacheStoreSessionListener create() {
+ TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
+ lsnr.setDataSource(new DataSourceStub());
+ return lsnr;
+ }
+ }
+
+ /**
+ * Test cache store session listener.
+ */
+ public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
+ /** {@inheritDoc} */
+ @Override public void onSessionStart(CacheStoreSession ses) {
+ fail("TestCacheStoreSessionListener.onSessionStart(CacheStoreSession) should not be called.");
+ }
+
+ /** {@inheritDoc} */
+ @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
+ fail("TestCacheStoreSessionListener.onSessionEnd(CacheStoreSession, boolean) should not be called.");
+ }
+ }
+
+ /** Empty cache store implementation. All overridden methods should not be called while the test is running. */
+ public static class EmptyCacheStore extends CacheStoreAdapter {
+ /** {@inheritDoc} */
+ @Override public Object load(Object key) throws CacheLoaderException {
+ fail("EmptyCacheStore.load(Object) should not be called.");
+
+ return null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void write(Cache.Entry entry) throws CacheWriterException {
+ fail("EmptyCacheStore.write(Cache.Entry) should not be called.");
+ }
+
+ /** {@inheritDoc} */
+ @Override public void delete(Object key) throws CacheWriterException {
+ fail("EmptyCacheStore.delete(Object) should not be called.");
+ }
+ }
+
+ /**
+ * Data source stub which should not be called.
+ */
+ public static class DataSourceStub implements DataSource, Serializable {
+ /** {@inheritDoc} */
+ @Override public Connection getConnection() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Connection getConnection(String username, String password) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public PrintWriter getLogWriter() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLogWriter(PrintWriter out) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLoginTimeout(int seconds) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public int getLoginTimeout() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ throw new UnsupportedOperationException();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
new file mode 100644
index 0000000..fbb881e
--- /dev/null
+++ b/modules/core/src/test/java/org/apache/ignite/cache/store/CacheStoreSessionListenerWriteBehindEnabled.java
@@ -0,0 +1,304 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.cache.store;
+
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Logger;
+import javax.cache.Cache;
+import javax.cache.configuration.Factory;
+import javax.cache.configuration.FactoryBuilder;
+import javax.cache.integration.CacheLoaderException;
+import javax.cache.integration.CacheWriterException;
+import javax.sql.DataSource;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListener;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
+import org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore;
+import org.apache.ignite.resources.IgniteInstanceResource;
+
+/**
+ * This class tests that calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * and {@link CacheStoreSessionListener#onSessionEnd(CacheStoreSession, boolean)} are executed from
+ * {@link GridCacheWriteBehindStore} only.
+ */
+public class CacheStoreSessionListenerWriteBehindEnabled extends GridCacheAbstractSelfTest {
+ /** */
+ protected final static int CNT = 100;
+
+ /** */
+ private final static int WRITE_BEHIND_FLUSH_FREQUENCY = 1000;
+
+ /** */
+ private static final List<OperationType> operations = Collections.synchronizedList(new ArrayList<OperationType>());
+
+ /** */
+ private static final AtomicInteger entryCnt = new AtomicInteger();
+
+ /** {@inheritDoc} */
+ @Override protected int gridCount() {
+ return 1;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
+ CacheConfiguration cacheCfg = super.cacheConfiguration(igniteInstanceName);
+
+ cacheCfg.setCacheStoreFactory(FactoryBuilder.factoryOf(EmptyCacheStore.class));
+
+ cacheCfg.setCacheStoreSessionListenerFactories(new CacheStoreSessionFactory());
+
+ cacheCfg.setReadThrough(true);
+ cacheCfg.setWriteThrough(true);
+
+ cacheCfg.setWriteBehindEnabled(true);
+ cacheCfg.setWriteBehindBatchSize(CNT * 2);
+ cacheCfg.setWriteBehindFlushFrequency(WRITE_BEHIND_FLUSH_FREQUENCY);
+
+ cacheCfg.setBackups(0);
+
+ return cacheCfg;
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTest() throws Exception {
+ super.beforeTest();
+
+ operations.clear();
+
+ entryCnt.set(0);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#get(Object)} performed.
+ */
+ public void testLookup() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i)
+ cache.get(i);
+
+ checkSessionCounters(CNT);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#put(Object, Object)} performed.
+ */
+ public void testUpdate() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i)
+ cache.put(i, i);
+
+ checkSessionCounters(1);
+ }
+
+ /**
+ * Tests that there are no redundant calls of {@link CacheStoreSessionListener#onSessionStart(CacheStoreSession)}
+ * while {@link IgniteCache#remove(Object)} performed.
+ */
+ public void testRemove() {
+ IgniteCache<Object, Object> cache = grid(0).getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ for (int i = 0; i < CNT; ++i) {
+ cache.remove(i);
+ }
+
+ checkSessionCounters(1);
+ }
+
+ /**
+ * @param startedSessions Number of expected sessions.
+ */
+ private void checkSessionCounters(int startedSessions) {
+ try {
+ // Wait for GridCacheWriteBehindStore
+ Thread.sleep(WRITE_BEHIND_FLUSH_FREQUENCY * 4);
+
+ assertEquals(CNT, entryCnt.get());
+
+ checkOpCount(operations, OperationType.SESSION_START, startedSessions);
+
+ checkOpCount(operations, OperationType.SESSION_END, startedSessions);
+ }
+ catch (InterruptedException e) {
+ throw new IgniteException("Failed to wait for the GridCacheWriteBehindStore due to interruption.", e);
+ }
+ }
+
+ /**
+ * @param operations List of {@link OperationType}.
+ * @param op Operation.
+ * @param expected Expected number of operations for the given {@code op}.
+ */
+ private void checkOpCount(List<OperationType> operations, OperationType op, int expected) {
+ int n = 0;
+
+ for (OperationType o : operations) {
+ if (op.equals(o))
+ ++n;
+ }
+
+ assertEquals("Operation=" + op.name(), expected, n);
+ }
+
+ /**
+ * Operation type.
+ */
+ public enum OperationType {
+ /**
+ * Cache store session started.
+ */
+ SESSION_START,
+
+ /**
+ * Cache store session ended.
+ */
+ SESSION_END,
+ }
+
+ /**
+ * Cache store session factory.
+ */
+ public static class CacheStoreSessionFactory implements Factory<TestCacheStoreSessionListener> {
+ /** {@inheritDoc} */
+ @Override public TestCacheStoreSessionListener create() {
+ TestCacheStoreSessionListener lsnr = new TestCacheStoreSessionListener();
+ lsnr.setDataSource(new DataSourceStub());
+ return lsnr;
+ }
+ }
+
+ /**
+ * Test cache store session listener.
+ */
+ public static class TestCacheStoreSessionListener extends CacheJdbcStoreSessionListener {
+ /** */
+ @IgniteInstanceResource
+ private Ignite ignite;
+
+ /** {@inheritDoc} */
+ @Override public void onSessionStart(CacheStoreSession ses) {
+ operations.add(OperationType.SESSION_START);
+ }
+
+ /** {@inheritDoc} */
+ @Override public void onSessionEnd(CacheStoreSession ses, boolean commit) {
+ operations.add(OperationType.SESSION_END);
+ }
+ }
+
+ /**
+ * Test cache store.
+ *
+ * {@link EmptyCacheStore#writeAll(Collection)} and {@link EmptyCacheStore#deleteAll(Collection)} should be called
+ * by {@link GridCacheWriteBehindStore}.
+ */
+ public static class EmptyCacheStore extends CacheStoreAdapter<Object, Object> {
+ /** */
+ @IgniteInstanceResource
+ private Ignite ignite;
+
+ /** {@inheritDoc} */
+ @Override public Object load(Object key) throws CacheLoaderException {
+ entryCnt.getAndIncrement();
+ return null;
+ }
+
+ /** {@inheritDoc} */
+ @Override public void writeAll(Collection<Cache.Entry<?, ?>> entries) {
+ entryCnt.addAndGet(entries.size());
+ }
+
+ /** {@inheritDoc} */
+ @Override public void write(Cache.Entry entry) throws CacheWriterException {
+ }
+
+ /** {@inheritDoc} */
+ @Override public void deleteAll(Collection<?> keys) {
+ entryCnt.addAndGet(keys.size());
+ }
+
+ /** {@inheritDoc} */
+ @Override public void delete(Object key) throws CacheWriterException {
+ }
+ }
+
+ /**
+ * Data source stub which should not be called.
+ */
+ public static class DataSourceStub implements DataSource, Serializable {
+ /** {@inheritDoc} */
+ @Override public Connection getConnection() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Connection getConnection(String username, String password) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public <T> T unwrap(Class<T> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public PrintWriter getLogWriter() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLogWriter(PrintWriter out) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public void setLoginTimeout(int seconds) throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public int getLoginTimeout() throws SQLException {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ throw new UnsupportedOperationException();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
index d931ea9..e4930e0 100644
--- a/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
+++ b/modules/core/src/test/java/org/apache/ignite/testsuites/IgniteCacheTestSuite4.java
@@ -18,6 +18,9 @@
package org.apache.ignite.testsuites;
import junit.framework.TestSuite;
+import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledAtomicCache;
+import org.apache.ignite.cache.store.CacheStoreListenerRWThroughDisabledTransactionalCache;
+import org.apache.ignite.cache.store.CacheStoreSessionListenerWriteBehindEnabled;
import org.apache.ignite.cache.store.jdbc.CacheJdbcStoreSessionListenerSelfTest;
import org.apache.ignite.internal.processors.GridCacheTxLoadFromStoreOnLockSelfTest;
import org.apache.ignite.internal.processors.cache.CacheClientStoreSelfTest;
@@ -276,6 +279,9 @@ public class IgniteCacheTestSuite4 extends TestSuite {
suite.addTestSuite(CacheOffheapMapEntrySelfTest.class);
suite.addTestSuite(CacheJdbcStoreSessionListenerSelfTest.class);
+ suite.addTestSuite(CacheStoreListenerRWThroughDisabledAtomicCache.class);
+ suite.addTestSuite(CacheStoreListenerRWThroughDisabledTransactionalCache.class);
+ suite.addTestSuite(CacheStoreSessionListenerWriteBehindEnabled.class);
suite.addTestSuite(CacheClientStoreSelfTest.class);
suite.addTestSuite(CacheStoreUsageMultinodeStaticStartAtomicTest.class);
@@ -341,4 +347,4 @@ public class IgniteCacheTestSuite4 extends TestSuite {
return suite;
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ignite/blob/b8672d7d/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Store/CacheStoreSessionTest.cs
----------------------------------------------------------------------
diff --git a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Store/CacheStoreSessionTest.cs b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Store/CacheStoreSessionTest.cs
index 818948c..6c9def3 100644
--- a/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Store/CacheStoreSessionTest.cs
+++ b/modules/platforms/dotnet/Apache.Ignite.Core.Tests/Cache/Store/CacheStoreSessionTest.cs
@@ -106,17 +106,8 @@ namespace Apache.Ignite.Core.Tests.Cache.Store
tx.Rollback();
}
- // SessionEnd is called once per store instance.
- Assert.AreEqual(StoreCount, _dumps.Count);
-
- foreach (var ops in _dumps)
- {
- var op = ops.Single();
- Assert.AreEqual(OperationType.SesEnd, op.Type);
- Assert.IsFalse(op.Commit);
- }
-
- _dumps = new ConcurrentBag<ICollection<Operation>>();
+ // SessionEnd should not be called.
+ Assert.AreEqual(0, _dumps.Count);
// 2. Test puts.
using (var tx = ignite.GetTransactions().TxStart())
[05/28] ignite git commit: IGNITE-6849: Fix of failing tests of
K-Means distributed clustering. This closes #3009
Posted by sb...@apache.org.
IGNITE-6849: Fix of failing tests of K-Means distributed clustering.
This closes #3009
Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/8195ba51
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/8195ba51
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/8195ba51
Branch: refs/heads/ignite-zk
Commit: 8195ba512a7bff7b0e882ce01017724be8bbd8d7
Parents: a1b6a33
Author: Artem Malykh <am...@gridgain.com>
Authored: Thu Nov 9 16:43:51 2017 +0300
Committer: Igor Sapego <is...@gridgain.com>
Committed: Thu Nov 9 16:43:51 2017 +0300
----------------------------------------------------------------------
.../impls/storage/matrix/MapWrapperStorage.java | 18 +-
.../ml/math/impls/vector/MapWrapperVector.java | 8 +
.../ml/clustering/ClusteringTestSuite.java | 3 +-
.../KMeansDistributedClustererTest.java | 197 -------------------
...KMeansDistributedClustererTestMultiNode.java | 146 ++++++++++++++
...MeansDistributedClustererTestSingleNode.java | 197 +++++++++++++++++++
6 files changed, 367 insertions(+), 202 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/MapWrapperStorage.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/MapWrapperStorage.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/MapWrapperStorage.java
index 381ad75..4648421 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/MapWrapperStorage.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/storage/matrix/MapWrapperStorage.java
@@ -30,10 +30,10 @@ import org.apache.ignite.ml.math.VectorStorage;
*/
public class MapWrapperStorage implements VectorStorage {
/** Underlying map. */
- Map<Integer, Double> data;
+ private Map<Integer, Double> data;
/** Vector size. */
- int size;
+ private int size;
/**
* Construct a wrapper around given map.
@@ -41,6 +41,8 @@ public class MapWrapperStorage implements VectorStorage {
* @param map Map to wrap.
*/
public MapWrapperStorage(Map<Integer, Double> map) {
+ data = map;
+
Set<Integer> keys = map.keySet();
GridArgumentCheck.notEmpty(keys, "map");
@@ -50,8 +52,14 @@ public class MapWrapperStorage implements VectorStorage {
assert min >= 0;
- data = map;
- size = (max - min) + 1;
+ size = (max - min) + 1;
+ }
+
+ /**
+ * No-op constructor for serialization.
+ */
+ public MapWrapperStorage() {
+ // No-op.
}
/** {@inheritDoc} */
@@ -75,12 +83,14 @@ public class MapWrapperStorage implements VectorStorage {
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject(data);
+ out.writeInt(size);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
data = (Map<Integer, Double>)in.readObject();
+ size = in.readInt();
}
/** {@inheritDoc} */
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/vector/MapWrapperVector.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/vector/MapWrapperVector.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/vector/MapWrapperVector.java
index 83b40c1..58309f6 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/vector/MapWrapperVector.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/impls/vector/MapWrapperVector.java
@@ -20,6 +20,7 @@ package org.apache.ignite.ml.math.impls.vector;
import java.util.Map;
import org.apache.ignite.ml.math.Matrix;
import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.VectorStorage;
import org.apache.ignite.ml.math.impls.storage.matrix.MapWrapperStorage;
/**
@@ -35,6 +36,13 @@ public class MapWrapperVector extends AbstractVector {
setStorage(new MapWrapperStorage(map));
}
+ /**
+ * No-op constructor for serialization.
+ */
+ public MapWrapperVector() {
+ // No-op.
+ }
+
/** {@inheritDoc} */
@Override public Vector like(int crd) {
throw new UnsupportedOperationException();
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java
index c39eeef..b4cce5e 100644
--- a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/ClusteringTestSuite.java
@@ -25,7 +25,8 @@ import org.junit.runners.Suite;
*/
@RunWith(Suite.class)
@Suite.SuiteClasses({
- KMeansDistributedClustererTest.class,
+ KMeansDistributedClustererTestSingleNode.class,
+ KMeansDistributedClustererTestMultiNode.class,
KMeansLocalClustererTest.class
})
public class ClusteringTestSuite {
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTest.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTest.java
deleted file mode 100644
index a59b7f9..0000000
--- a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTest.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.ml.clustering;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.math.DistanceMeasure;
-import org.apache.ignite.ml.math.EuclideanDistance;
-import org.apache.ignite.ml.math.StorageConstants;
-import org.apache.ignite.ml.math.Vector;
-import org.apache.ignite.ml.math.VectorUtils;
-import org.apache.ignite.ml.math.functions.Functions;
-import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
-import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
-import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
-import org.junit.Assert;
-import org.junit.Test;
-
-import static org.apache.ignite.ml.clustering.KMeansUtil.checkIsInEpsilonNeighbourhood;
-
-/** */
-public class KMeansDistributedClustererTest extends GridCommonAbstractTest {
- /**
- * Number of nodes in grid. We should use 1 in this test because otherwise algorithm will be unstable
- * (We cannot guarantee the order in which results are returned from each node).
- */
- private static final int NODE_COUNT = 1;
-
- /** Grid instance. */
- private Ignite ignite;
-
- /**
- * Default constructor.
- */
- public KMeansDistributedClustererTest() {
- super(false);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override protected void beforeTest() throws Exception {
- ignite = grid(NODE_COUNT);
- }
-
- /** {@inheritDoc} */
- @Override protected void beforeTestsStarted() throws Exception {
- for (int i = 1; i <= NODE_COUNT; i++)
- startGrid(i);
- }
-
- /** {@inheritDoc} */
- @Override protected void afterTestsStopped() throws Exception {
- stopAllGrids();
- }
-
- /** */
- @Test
- public void testPerformClusterAnalysisDegenerate() {
- IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
-
- KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(new EuclideanDistance(), 1, 1, 1L);
-
- double[] v1 = new double[] {1959, 325100};
- double[] v2 = new double[] {1960, 373200};
-
- SparseDistributedMatrix points = new SparseDistributedMatrix(2, 2, StorageConstants.ROW_STORAGE_MODE,
- StorageConstants.RANDOM_ACCESS_MODE);
-
- points.setRow(0, v1);
- points.setRow(1, v2);
-
- KMeansModel mdl = clusterer.cluster(points, 1);
-
- Assert.assertEquals(1, mdl.centers().length);
- Assert.assertEquals(2, mdl.centers()[0].size());
- }
-
- /** */
- @Test
- public void testClusterizationOnDatasetWithObviousStructure() throws IOException {
- IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
-
- int ptsCnt = 10000;
- int squareSideLen = 10000;
-
- Random rnd = new Random(123456L);
-
- // Let centers be in the vertices of square.
- Map<Integer, Vector> centers = new HashMap<>();
- centers.put(100, new DenseLocalOnHeapVector(new double[] {0.0, 0.0}));
- centers.put(900, new DenseLocalOnHeapVector(new double[] {squareSideLen, 0.0}));
- centers.put(3000, new DenseLocalOnHeapVector(new double[] {0.0, squareSideLen}));
- centers.put(6000, new DenseLocalOnHeapVector(new double[] {squareSideLen, squareSideLen}));
-
- int centersCnt = centers.size();
-
- SparseDistributedMatrix points = new SparseDistributedMatrix(ptsCnt, 2, StorageConstants.ROW_STORAGE_MODE,
- StorageConstants.RANDOM_ACCESS_MODE);
-
- List<Integer> permutation = IntStream.range(0, ptsCnt).boxed().collect(Collectors.toList());
- Collections.shuffle(permutation, rnd);
-
- Vector[] mc = new Vector[centersCnt];
- Arrays.fill(mc, VectorUtils.zeroes(2));
-
- int centIdx = 0;
- int totalCnt = 0;
-
- List<Vector> massCenters = new ArrayList<>();
-
- for (Integer count : centers.keySet()) {
- for (int i = 0; i < count; i++) {
- DenseLocalOnHeapVector pnt = (DenseLocalOnHeapVector)new DenseLocalOnHeapVector(2).assign(centers.get(count));
- // pertrubate point on random value.
- pnt.map(val -> val + rnd.nextDouble() * squareSideLen / 100);
- mc[centIdx] = mc[centIdx].plus(pnt);
- points.assignRow(permutation.get(totalCnt), pnt);
- totalCnt++;
- }
- massCenters.add(mc[centIdx].times(1 / (double)count));
- centIdx++;
- }
-
- EuclideanDistance dist = new EuclideanDistance();
- OrderedNodesComparator comp = new OrderedNodesComparator(centers.values().toArray(new Vector[] {}), dist);
-
- massCenters.sort(comp);
- KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(dist, 3, 100, 1L);
-
- KMeansModel mdl = clusterer.cluster(points, 4);
- Vector[] resCenters = mdl.centers();
- Arrays.sort(resCenters, comp);
-
- checkIsInEpsilonNeighbourhood(resCenters, massCenters.toArray(new Vector[] {}), 30.0);
- }
-
- /** */
- private static class OrderedNodesComparator implements Comparator<Vector> {
- /** */
- private final DistanceMeasure measure;
-
- /** */
- List<Vector> orderedNodes;
-
- /** */
- public OrderedNodesComparator(Vector[] orderedNodes, DistanceMeasure measure) {
- this.orderedNodes = Arrays.asList(orderedNodes);
- this.measure = measure;
- }
-
- /** */
- private int findClosestNodeIndex(Vector v) {
- return Functions.argmin(orderedNodes, v1 -> measure.compute(v1, v)).get1();
- }
-
- /** */
- @Override public int compare(Vector v1, Vector v2) {
- int ind1 = findClosestNodeIndex(v1);
- int ind2 = findClosestNodeIndex(v2);
-
- int signum = (int)Math.signum(ind1 - ind2);
-
- if (signum != 0)
- return signum;
-
- return (int)Math.signum(orderedNodes.get(ind1).minus(v1).kNorm(2) -
- orderedNodes.get(ind2).minus(v2).kNorm(2));
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestMultiNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestMultiNode.java b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestMultiNode.java
new file mode 100644
index 0000000..06066c2
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestMultiNode.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.clustering;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.ml.math.DistanceMeasure;
+import org.apache.ignite.ml.math.EuclideanDistance;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.VectorUtils;
+import org.apache.ignite.ml.math.functions.Functions;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.ignite.ml.clustering.KMeansUtil.checkIsInEpsilonNeighbourhood;
+
+/**
+ * This test is made to make sure that K-Means distributed clustering does not crash in a distributed environment.
+ * In {@link KMeansDistributedClustererTestSingleNode} we check the logic of clustering (checks for cluster structures).
+ * In this class we just check that the clusterer does not crash. There are two separate tests because we cannot
+ * guarantee the order in which nodes return results of intermediate computations and therefore the algorithm can
+ * return different results.
+ */
+public class KMeansDistributedClustererTestMultiNode extends GridCommonAbstractTest {
+ /** Number of nodes in grid. */
+ private static final int NODE_COUNT = 3;
+
+ /** Grid instance. */
+ private Ignite ignite;
+
+ /**
+ * Default constructor.
+ */
+ public KMeansDistributedClustererTestMultiNode() {
+ super(false);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override protected void beforeTest() throws Exception {
+ ignite = grid(NODE_COUNT);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTestsStarted() throws Exception {
+ for (int i = 1; i <= NODE_COUNT; i++)
+ startGrid(i);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void afterTestsStopped() throws Exception {
+ stopAllGrids();
+ }
+
+ /** */
+ @Test
+ public void testPerformClusterAnalysisDegenerate() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+ KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(new EuclideanDistance(), 1, 1, 1L);
+
+ double[] v1 = new double[] {1959, 325100};
+ double[] v2 = new double[] {1960, 373200};
+
+ SparseDistributedMatrix points = new SparseDistributedMatrix(2, 2, StorageConstants.ROW_STORAGE_MODE,
+ StorageConstants.RANDOM_ACCESS_MODE);
+
+ points.setRow(0, v1);
+ points.setRow(1, v2);
+
+ clusterer.cluster(points, 1);
+ }
+
+ /** */
+ @Test
+ public void testClusterizationOnDatasetWithObviousStructure() throws IOException {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+ int ptsCnt = 10000;
+ int squareSideLen = 10000;
+
+ Random rnd = new Random(123456L);
+
+ // Let centers be in the vertices of square.
+ Map<Integer, Vector> centers = new HashMap<>();
+ centers.put(100, new DenseLocalOnHeapVector(new double[] {0.0, 0.0}));
+ centers.put(900, new DenseLocalOnHeapVector(new double[] {squareSideLen, 0.0}));
+ centers.put(3000, new DenseLocalOnHeapVector(new double[] {0.0, squareSideLen}));
+ centers.put(6000, new DenseLocalOnHeapVector(new double[] {squareSideLen, squareSideLen}));
+
+ SparseDistributedMatrix points = new SparseDistributedMatrix(ptsCnt, 2, StorageConstants.ROW_STORAGE_MODE,
+ StorageConstants.RANDOM_ACCESS_MODE);
+
+ List<Integer> permutation = IntStream.range(0, ptsCnt).boxed().collect(Collectors.toList());
+ Collections.shuffle(permutation, rnd);
+
+ int totalCnt = 0;
+
+ for (Integer count : centers.keySet()) {
+ for (int i = 0; i < count; i++) {
+ DenseLocalOnHeapVector pnt = (DenseLocalOnHeapVector)new DenseLocalOnHeapVector(2).assign(centers.get(count));
+ // Perturb point by a random value.
+ pnt.map(val -> val + rnd.nextDouble() * squareSideLen / 100);
+ points.assignRow(permutation.get(totalCnt), pnt);
+ totalCnt++;
+ }
+ }
+
+ EuclideanDistance dist = new EuclideanDistance();
+
+ KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(dist, 3, 100, 1L);
+
+ clusterer.cluster(points, 4);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ignite/blob/8195ba51/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestSingleNode.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestSingleNode.java b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestSingleNode.java
new file mode 100644
index 0000000..27aaa0c
--- /dev/null
+++ b/modules/ml/src/test/java/org/apache/ignite/ml/clustering/KMeansDistributedClustererTestSingleNode.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.clustering;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.ml.math.DistanceMeasure;
+import org.apache.ignite.ml.math.EuclideanDistance;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.VectorUtils;
+import org.apache.ignite.ml.math.functions.Functions;
+import org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix;
+import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.ignite.ml.clustering.KMeansUtil.checkIsInEpsilonNeighbourhood;
+
+/** */
+public class KMeansDistributedClustererTestSingleNode extends GridCommonAbstractTest {
+ /**
+ * Number of nodes in grid. We should use 1 in this test because otherwise algorithm will be unstable
+ * (We cannot guarantee the order in which results are returned from each node).
+ */
+ private static final int NODE_COUNT = 1;
+
+ /** Grid instance. */
+ private Ignite ignite;
+
+ /**
+ * Default constructor.
+ */
+ public KMeansDistributedClustererTestSingleNode() {
+ super(false);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override protected void beforeTest() throws Exception {
+ ignite = grid(NODE_COUNT);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void beforeTestsStarted() throws Exception {
+ for (int i = 1; i <= NODE_COUNT; i++)
+ startGrid(i);
+ }
+
+ /** {@inheritDoc} */
+ @Override protected void afterTestsStopped() throws Exception {
+ stopAllGrids();
+ }
+
+ /** */
+ @Test
+ public void testPerformClusterAnalysisDegenerate() {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+ KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(new EuclideanDistance(), 1, 1, 1L);
+
+ double[] v1 = new double[] {1959, 325100};
+ double[] v2 = new double[] {1960, 373200};
+
+ SparseDistributedMatrix points = new SparseDistributedMatrix(2, 2, StorageConstants.ROW_STORAGE_MODE,
+ StorageConstants.RANDOM_ACCESS_MODE);
+
+ points.setRow(0, v1);
+ points.setRow(1, v2);
+
+ KMeansModel mdl = clusterer.cluster(points, 1);
+
+ Assert.assertEquals(1, mdl.centers().length);
+ Assert.assertEquals(2, mdl.centers()[0].size());
+ }
+
+ /** */
+ @Test
+ public void testClusterizationOnDatasetWithObviousStructure() throws IOException {
+ IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName());
+
+ int ptsCnt = 10000;
+ int squareSideLen = 10000;
+
+ Random rnd = new Random(123456L);
+
+ // Let centers be in the vertices of square.
+ Map<Integer, Vector> centers = new HashMap<>();
+ centers.put(100, new DenseLocalOnHeapVector(new double[] {0.0, 0.0}));
+ centers.put(900, new DenseLocalOnHeapVector(new double[] {squareSideLen, 0.0}));
+ centers.put(3000, new DenseLocalOnHeapVector(new double[] {0.0, squareSideLen}));
+ centers.put(6000, new DenseLocalOnHeapVector(new double[] {squareSideLen, squareSideLen}));
+
+ int centersCnt = centers.size();
+
+ SparseDistributedMatrix points = new SparseDistributedMatrix(ptsCnt, 2, StorageConstants.ROW_STORAGE_MODE,
+ StorageConstants.RANDOM_ACCESS_MODE);
+
+ List<Integer> permutation = IntStream.range(0, ptsCnt).boxed().collect(Collectors.toList());
+ Collections.shuffle(permutation, rnd);
+
+ Vector[] mc = new Vector[centersCnt];
+ Arrays.fill(mc, VectorUtils.zeroes(2));
+
+ int centIdx = 0;
+ int totalCnt = 0;
+
+ List<Vector> massCenters = new ArrayList<>();
+
+ for (Integer count : centers.keySet()) {
+ for (int i = 0; i < count; i++) {
+ DenseLocalOnHeapVector pnt = (DenseLocalOnHeapVector)new DenseLocalOnHeapVector(2).assign(centers.get(count));
+ // Perturb point by a random value.
+ pnt.map(val -> val + rnd.nextDouble() * squareSideLen / 100);
+ mc[centIdx] = mc[centIdx].plus(pnt);
+ points.assignRow(permutation.get(totalCnt), pnt);
+ totalCnt++;
+ }
+ massCenters.add(mc[centIdx].times(1 / (double)count));
+ centIdx++;
+ }
+
+ EuclideanDistance dist = new EuclideanDistance();
+ OrderedNodesComparator comp = new OrderedNodesComparator(centers.values().toArray(new Vector[] {}), dist);
+
+ massCenters.sort(comp);
+ KMeansDistributedClusterer clusterer = new KMeansDistributedClusterer(dist, 3, 100, 1L);
+
+ KMeansModel mdl = clusterer.cluster(points, 4);
+ Vector[] resCenters = mdl.centers();
+ Arrays.sort(resCenters, comp);
+
+ checkIsInEpsilonNeighbourhood(resCenters, massCenters.toArray(new Vector[] {}), 30.0);
+ }
+
+ /** */
+ private static class OrderedNodesComparator implements Comparator<Vector> {
+ /** */
+ private final DistanceMeasure measure;
+
+ /** */
+ List<Vector> orderedNodes;
+
+ /** */
+ public OrderedNodesComparator(Vector[] orderedNodes, DistanceMeasure measure) {
+ this.orderedNodes = Arrays.asList(orderedNodes);
+ this.measure = measure;
+ }
+
+ /** */
+ private int findClosestNodeIndex(Vector v) {
+ return Functions.argmin(orderedNodes, v1 -> measure.compute(v1, v)).get1();
+ }
+
+ /** */
+ @Override public int compare(Vector v1, Vector v2) {
+ int ind1 = findClosestNodeIndex(v1);
+ int ind2 = findClosestNodeIndex(v2);
+
+ int signum = (int)Math.signum(ind1 - ind2);
+
+ if (signum != 0)
+ return signum;
+
+ return (int)Math.signum(orderedNodes.get(ind1).minus(v1).kNorm(2) -
+ orderedNodes.get(ind2).minus(v2).kNorm(2));
+ }
+ }
+}
\ No newline at end of file