Posted to commits@ignite.apache.org by av...@apache.org on 2021/02/10 09:25:52 UTC

[ignite] branch ignite-ducktape-conflict_resolve created (now 94edf14)

This is an automated email from the ASF dual-hosted git repository.

av pushed a change to branch ignite-ducktape-conflict_resolve
in repository https://gitbox.apache.org/repos/asf/ignite.git.


      at 94edf14  Revert "Check master (#8620)"

This branch includes the following new commits:

     new 94edf14  Revert "Check master (#8620)"

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[ignite] 01/01: Revert "Check master (#8620)"

Posted by av...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

av pushed a commit to branch ignite-ducktape-conflict_resolve
in repository https://gitbox.apache.org/repos/asf/ignite.git

commit 94edf1435d5045d80f442e143cbfe667a38fdeaa
Author: Anton Vinogradov <av...@apache.org>
AuthorDate: Wed Feb 10 12:24:55 2021 +0300

    Revert "Check master (#8620)"
    
    This reverts commit 362e8c35
---
 assembly/dependencies-apache-ignite-slim.xml       |    1 -
 docs/_config.yml                                   |    2 +-
 .../mybatis-l2-cache.adoc                          |    2 +-
 docs/_docs/index.adoc                              |   38 +-
 .../binary-classification/decision-trees.adoc      |    6 +-
 .../model-import-from-apache-spark.adoc            |    2 +-
 .../model-selection/cross-validation.adoc          |    4 +-
 .../model-selection/pipeline-api.adoc              |    4 +-
 .../regression/decision-trees-regression.adoc      |    6 +-
 docs/_docs/monitoring-metrics/system-views.adoc    |   27 -
 examples/pom-standalone-lgpl.xml                   |    6 -
 examples/pom-standalone.xml                        |    6 -
 examples/pom.xml                                   |   12 -
 .../ml/clustering/KMeansClusterizationExample.java |    4 +-
 .../CatboostClassificationModelParserExample.java  |  113 --
 .../CatboostRegressionModelParserExample.java      |  125 --
 .../ANNClassificationExportImportExample.java      |  339 -----
 .../CompoundNaiveBayesExportImportExample.java     |  129 --
 ...isionTreeClassificationExportImportExample.java |  151 ---
 .../DecisionTreeRegressionExportImportExample.java |  126 --
 .../DiscreteNaiveBayesExportImportExample.java     |  117 --
 ...DBOnTreesClassificationExportImportExample.java |  147 ---
 .../GDBOnTreesRegressionExportImportExample.java   |  145 ---
 .../GaussianNaiveBayesExportImportExample.java     |  117 --
 .../KMeansClusterizationExportImportExample.java   |   99 --
 .../LinearRegressionExportImportExample.java       |  116 --
 .../LogisticRegressionExportImportExample.java     |  122 --
 ...domForestClassificationExportImportExample.java |  144 ---
 .../RandomForestRegressionExportImportExample.java |  151 ---
 .../inference/exchange/SVMExportImportExample.java |  113 --
 .../modelparser/DecisionTreeFromSparkExample.java  |    4 +-
 .../DecisionTreeRegressionFromSparkExample.java    |    4 +-
 .../ml/preprocessing/encoding/EncoderExample.java  |    4 +-
 .../encoding/EncoderExampleWithNormalization.java  |    4 +-
 .../encoding/LabelEncoderExample.java              |    4 +-
 .../encoding/TargetEncoderExample.java             |  138 --
 .../linear/BostonHousePricesPredictionExample.java |    4 +-
 .../ml/selection/cv/CrossValidationExample.java    |    4 +-
 ...eeClassificationTrainerSQLInferenceExample.java |    4 +-
 ...onTreeClassificationTrainerSQLTableExample.java |    4 +-
 .../DecisionTreeClassificationTrainerExample.java  |    4 +-
 .../tree/DecisionTreeRegressionTrainerExample.java |    4 +-
 .../GDBOnTreesClassificationTrainerExample.java    |    8 +-
 .../GDBOnTreesRegressionTrainerExample.java        |   10 +-
 .../examples/ml/tutorial/Step_11_Boosting.java     |    8 +-
 .../ml/tutorial/Step_1_Read_and_Learn.java         |    4 +-
 .../examples/ml/tutorial/Step_2_Imputing.java      |    4 +-
 .../examples/ml/tutorial/Step_3_Categorial.java    |    4 +-
 .../Step_3_Categorial_with_One_Hot_Encoder.java    |    4 +-
 .../examples/ml/tutorial/Step_4_Add_age_fare.java  |    4 +-
 .../examples/ml/tutorial/Step_5_Scaling.java       |    4 +-
 .../ml/tutorial/Step_7_Split_train_test.java       |    4 +-
 .../ignite/examples/ml/tutorial/Step_8_CV.java     |    6 +-
 .../ml/tutorial/Step_8_CV_with_Param_Grid.java     |    6 +-
 .../Step_8_CV_with_Param_Grid_and_pipeline.java    |    4 +-
 .../hyperparametertuning/Step_13_RandomSearch.java |    6 +-
 .../Step_14_Parallel_Brute_Force_Search.java       |    6 +-
 .../Step_15_Parallel_Random_Search.java            |    6 +-
 .../Step_16_Genetic_Programming_Search.java        |    6 +-
 ...tep_17_Parallel_Genetic_Programming_Search.java |    6 +-
 .../ignite/examples/ml/util/MLSandboxDatasets.java |    5 +-
 ...-challenge-sample-catboost-expected-results.csv |    4 -
 .../amazon-employee-access-challenge-sample.csv    |    5 -
 .../amazon-employee-access-challenge_train.csv     |  100 --
 ...n_housing_dataset-catboost-expected-results.txt |  505 --------
 .../main/resources/models/catboost/model_clf.cbm   |  Bin 1089896 -> 0 bytes
 .../main/resources/models/catboost/model_reg.cbm   |  Bin 1089824 -> 0 bytes
 .../jmh/thin/JmhThinClientAbstractBenchmark.java   |  135 --
 .../jmh/thin/JmhThinClientCacheBenchmark.java      |   81 --
 ...eCreationDestructionWileTopologyChangeTest.java |   70 --
 .../ClientSizeCacheCreationDestructionTest.java    | 1243 ------------------
 .../client/suite/IgniteClientTestSuite.java        |    7 +-
 .../PersistenceBasicCompatibilityTest.java         |   58 +-
 .../WalPageCompressionIntegrationTest.java         |   18 -
 .../ignite/internal/commandline/CommandList.java   |    9 +-
 .../commandline/DefragmentationCommand.java        |  249 ----
 .../defragmentation/DefragmentationArguments.java  |   63 -
 .../DefragmentationSubcommands.java                |   68 -
 .../diagnostic/ConnectivityCommand.java            |  194 ---
 .../commandline/diagnostic/DiagnosticCommand.java  |    3 -
 .../diagnostic/DiagnosticSubCommand.java           |    5 +-
 .../encryption/CacheGroupEncryptionCommand.java    |  264 ----
 .../encryption/ChangeCacheGroupKeyCommand.java     |   95 --
 .../encryption/ChangeMasterKeyCommand.java         |   88 --
 .../commandline/encryption/EncryptionCommand.java  |  130 ++
 .../commandline/encryption/EncryptionCommands.java |   70 --
 .../EncryptionSubcommand.java}                     |   50 +-
 .../encryption/EncryptionSubcommands.java          |   99 --
 .../encryption/GetMasterKeyNameCommand.java        |   73 --
 .../encryption/ReencryptionRateCommand.java        |  137 --
 .../commandline/CommandHandlerParsingTest.java     |    3 +-
 .../testsuites/IgniteControlUtilityTestSuite.java  |    5 -
 .../util/GridCommandHandlerAbstractTest.java       |   30 +-
 .../GridCommandHandlerDefragmentationTest.java     |  470 -------
 ...idCommandHandlerIndexingClusterByClassTest.java |    7 -
 .../GridCommandHandlerInterruptCommandTest.java    |  326 -----
 .../apache/ignite/util/GridCommandHandlerTest.java |  357 +-----
 .../org/apache/ignite/IgniteSystemProperties.java  |   54 -
 .../ClusterNodeAttributeAffinityBackupFilter.java  |   16 +-
 .../configuration/DataStorageConfiguration.java    |   44 +-
 .../configuration/EncryptionConfiguration.java     |    2 +-
 .../java/org/apache/ignite/events/EventType.java   |    5 -
 .../ignite/events/SqlQueryExecutionEvent.java      |    4 -
 .../org/apache/ignite/internal/GridComponent.java  |    5 +-
 .../apache/ignite/internal/GridKernalContext.java  |   17 +-
 .../ignite/internal/GridKernalContextImpl.java     |   26 +-
 .../org/apache/ignite/internal/IgniteFeatures.java |    5 +-
 .../org/apache/ignite/internal/IgniteKernal.java   |   19 +-
 .../streams/BinaryByteBufferInputStream.java       |   91 +-
 .../internal/client/thin/ClientComputeImpl.java    |   20 +-
 .../internal/client/thin/ClientSslUtils.java       |  293 -----
 .../internal/client/thin/NotificationListener.java |    4 +-
 .../internal/client/thin/PayloadInputChannel.java  |    8 +-
 .../internal/client/thin/ReliableChannel.java      |   63 +-
 .../internal/client/thin/TcpClientChannel.java     |  692 ++++++++--
 .../internal/client/thin/TcpIgniteClient.java      |   27 +-
 .../internal/client/thin/io/ClientConnection.java  |   39 -
 .../thin/io/ClientConnectionMultiplexer.java       |   52 -
 .../client/thin/io/ClientMessageDecoder.java       |   92 --
 .../client/thin/io/ClientMessageHandler.java       |   31 -
 .../io/gridnioserver/GridNioClientConnection.java  |   93 --
 .../GridNioClientConnectionMultiplexer.java        |  147 ---
 .../io/gridnioserver/GridNioClientListener.java    |   73 --
 .../thin/io/gridnioserver/GridNioClientParser.java |   59 -
 .../stream/v2/DirectByteBufferStreamImplV2.java    |   15 +-
 .../internal/maintenance/MaintenanceProcessor.java |   29 +-
 .../internal/managers/IgniteMBeansManager.java     |   12 -
 .../managers/discovery/GridDiscoveryManager.java   |   21 +-
 .../encryption/CacheGroupEncryptionKeys.java       |    9 -
 .../managers/encryption/CacheGroupPageScanner.java |   86 +-
 .../managers/encryption/GridEncryptionManager.java |   60 +-
 .../internal/metric/IoStatisticsHolderQuery.java   |   21 +-
 .../internal/metric/IoStatisticsQueryHelper.java   |    6 +-
 .../pagemem/wal/IgniteWriteAheadLogManager.java    |   16 +-
 .../processors/cache/CacheDiagnosticManager.java   |    2 +-
 .../processors/cache/CacheGroupMetricsImpl.java    |   46 +-
 .../internal/processors/cache/CacheLockImpl.java   |   21 +-
 .../processors/cache/ClusterCachesInfo.java        |   18 +-
 .../processors/cache/GridCacheAdapter.java         |  308 ++---
 .../processors/cache/GridCacheProcessor.java       |   28 +-
 .../internal/processors/cache/GridCacheUtils.java  |   16 +
 .../cache/IgniteCacheOffheapManagerImpl.java       |    2 +-
 .../cache/distributed/dht/GridDhtCacheAdapter.java |  119 +-
 .../distributed/dht/atomic/GridDhtAtomicCache.java |   27 +
 .../dht/colocated/GridDhtColocatedCache.java       |    6 +
 .../preloader/GridDhtPartitionsExchangeFuture.java |   34 +-
 .../dht/preloader/latch/ExchangeLatchManager.java  |   25 +-
 .../distributed/near/GridNearAtomicCache.java      |    3 +
 .../distributed/near/GridNearCacheAdapter.java     |    7 +
 .../near/GridNearTransactionalCache.java           |    3 +
 .../cache/local/atomic/GridLocalAtomicCache.java   |   18 +
 .../GridCacheDatabaseSharedManager.java            |  229 +---
 .../cache/persistence/GridCacheOffheapManager.java |   54 +-
 .../IgniteCacheDatabaseSharedManager.java          |   11 +-
 .../processors/cache/persistence/RowStore.java     |    2 +-
 .../cache/persistence/checkpoint/Checkpoint.java   |   19 +-
 .../persistence/checkpoint/CheckpointEntry.java    |    5 +-
 .../persistence/checkpoint/CheckpointHistory.java  |  216 ++--
 .../persistence/checkpoint/CheckpointManager.java  |    7 +-
 .../checkpoint/CheckpointMarkersStorage.java       |   24 +-
 .../checkpoint/CheckpointReadWriteLock.java        |   11 +-
 .../cache/persistence/checkpoint/Checkpointer.java |   16 +-
 .../checkpoint/LightweightCheckpointManager.java   |    6 +-
 .../CachePartitionDefragmentationManager.java      | 1317 --------------------
 .../defragmentation/DefragmentationFileUtils.java  |  410 ------
 .../defragmentation/DefragmentationMXBeanImpl.java |   85 --
 .../DefragmentationPageReadWriteManager.java       |   37 -
 .../defragmentation/IgniteDefragmentation.java     |  341 -----
 .../defragmentation/IgniteDefragmentationImpl.java |  223 ----
 .../cache/persistence/defragmentation/LinkMap.java |  276 ----
 .../persistence/defragmentation/PageStoreMap.java  |  108 --
 .../persistence/defragmentation/TreeIterator.java  |  109 --
 .../maintenance/DefragmentationParameters.java     |   82 --
 .../DefragmentationWorkflowCallback.java           |   73 --
 .../maintenance/ExecuteDefragmentationAction.java  |  101 --
 .../maintenance/StopDefragmentationAction.java     |   53 -
 .../persistence/file/FilePageStoreManager.java     |    9 -
 .../cache/persistence/pagemem/PageMemoryImpl.java  |    4 +-
 .../cache/persistence/tree/BPlusTree.java          |   28 +-
 .../cache/persistence/tree/io/PageIO.java          |   13 -
 .../cache/persistence/tree/util/InsertLast.java    |    8 +-
 .../wal/AbstractWalRecordsIterator.java            |    4 +-
 .../cache/persistence/wal/FileDescriptor.java      |    6 +-
 .../persistence/wal/FileWriteAheadLogManager.java  |  879 ++++++-------
 .../wal/aware/SegmentArchiveSizeStorage.java       |  104 --
 .../wal/aware/SegmentArchivedStorage.java          |   34 +-
 .../cache/persistence/wal/aware/SegmentAware.java  |  194 +--
 .../wal/aware/SegmentCompressStorage.java          |   59 +-
 .../wal/aware/SegmentCurrentStateStorage.java      |   84 +-
 .../persistence/wal/aware/SegmentLockStorage.java  |   41 +-
 .../persistence/wal/aware/SegmentObservable.java   |    2 +-
 .../wal/aware/SegmentReservationStorage.java       |   92 +-
 .../wal/aware/SegmentTruncateStorage.java          |  151 ---
 .../persistence/wal/io/LockedReadFileInput.java    |   27 +-
 .../wal/io/LockedSegmentFileInputFactory.java      |    2 +-
 .../wal/reader/IgniteWalIteratorFactory.java       |    2 +-
 .../wal/reader/StandaloneGridKernalContext.java    |   12 -
 .../wal/reader/StandaloneWalRecordsIterator.java   |    2 +-
 .../query/GridCacheDistributedQueryFuture.java     |    5 -
 .../query/GridCacheDistributedQueryManager.java    |   52 +-
 .../cache/query/GridCacheQueryManager.java         |   20 -
 .../cache/transactions/IgniteInternalTx.java       |    7 -
 .../cache/transactions/IgniteTxAdapter.java        |   29 +-
 .../cache/transactions/IgniteTxManager.java        |   33 +-
 .../cluster/GridClusterStateProcessor.java         |    6 -
 .../internal/processors/job/GridJobProcessor.java  |    8 -
 .../persistence/DistributedMetaStorageImpl.java    |  105 +-
 .../processors/metric/GridMetricManager.java       |    5 +-
 .../processors/odbc/ClientListenerProcessor.java   |    2 +-
 .../processors/odbc/odbc/OdbcColumnMeta.java       |   39 +-
 .../odbc/odbc/OdbcConnectionContext.java           |    6 +-
 .../processors/odbc/odbc/OdbcMessageParser.java    |    4 +-
 .../processors/odbc/odbc/OdbcRequestHandler.java   |    7 +-
 .../processors/odbc/odbc/OdbcResultSet.java        |    2 +-
 .../internal/processors/odbc/odbc/OdbcUtils.java   |    7 +-
 .../FilePerformanceStatisticsReader.java           |  529 --------
 .../FilePerformanceStatisticsWriter.java           |  502 --------
 .../performancestatistics/OperationType.java       |  185 ---
 .../PerformanceStatisticsHandler.java              |   97 --
 .../PerformanceStatisticsMBeanImpl.java            |   50 -
 .../PerformanceStatisticsProcessor.java            |  293 -----
 .../platform/binary/PlatformBinaryProcessor.java   |   14 +-
 .../platform/utils/PlatformConfigurationUtils.java |   45 +-
 .../processors/query/GridQueryFieldMetadata.java   |    9 -
 .../processors/query/GridQueryIndexing.java        |   28 -
 .../processors/query/GridRunningQueryInfo.java     |   26 -
 .../processors/query/RunningQueryManager.java      |   32 -
 .../rest/protocols/tcp/GridTcpRestNioListener.java |  104 +-
 .../processors/service/IgniteServiceProcessor.java |   26 +-
 .../internal/processors/service/ServiceInfo.java   |   36 +-
 .../processors/task/GridTaskProcessor.java         |    9 -
 .../suggestions/OsConfigurationSuggestions.java    |   38 +-
 .../ignite/internal/util/BasicRateLimiter.java     |   20 +-
 .../ignite/internal/util/HostAndPortRange.java     |  133 +-
 .../apache/ignite/internal/util/IgniteUtils.java   |  221 +++-
 .../util/ReentrantReadWriteLockWithTracking.java   |  218 ----
 .../internal/util/collection/IntHashMap.java       |   26 -
 .../ignite/internal/util/collection/IntMap.java    |    6 -
 .../internal/util/collection/IntRWHashMap.java     |   22 -
 .../util/tostring/GridToStringBuilder.java         |   46 -
 .../visor/annotation/InterruptibleVisorTask.java   |   32 -
 .../VisorDefragmentationOperation.java             |   28 -
 .../defragmentation/VisorDefragmentationTask.java  |  181 ---
 .../VisorDefragmentationTaskArg.java               |   77 --
 .../VisorDefragmentationTaskResult.java            |   72 --
 .../availability/VisorConnectivityArgs.java        |   68 -
 .../availability/VisorConnectivityResult.java      |   69 -
 .../availability/VisorConnectivityTask.java        |  125 --
 .../encryption/VisorCacheGroupEncryptionTask.java  |  137 --
 .../VisorCacheGroupEncryptionTaskArg.java          |   68 -
 .../VisorCacheGroupEncryptionTaskResult.java       |   90 --
 .../encryption/VisorChangeCacheGroupKeyTask.java   |   61 -
 .../encryption/VisorEncryptionKeyIdsTask.java      |   83 --
 .../encryption/VisorReencryptionRateTask.java      |  133 --
 .../encryption/VisorReencryptionRateTaskArg.java   |   74 --
 .../encryption/VisorReencryptionResumeTask.java    |   60 -
 .../encryption/VisorReencryptionStatusTask.java    |   88 --
 .../encryption/VisorReencryptionSuspendTask.java   |   83 --
 .../ignite/internal/visor/misc/VisorWalTask.java   |    2 +-
 .../internal/visor/verify/VisorIdleVerifyJob.java  |   13 -
 .../ignite/maintenance/MaintenanceRegistry.java    |   22 +-
 .../ignite/mxbean/DefragmentationMXBean.java       |   73 --
 .../ignite/mxbean/PerformanceStatisticsMBean.java  |   40 -
 .../ignite/spi/discovery/tcp/ServerImpl.java       |    6 +-
 .../main/resources/META-INF/classnames.properties  |   29 -
 .../java/org/apache/ignite/TestStorageUtils.java   |  104 --
 .../ignite/cache/NoUnnecessaryRebalancesTest.java  |  163 ---
 .../ignite/client/ConnectToStartingNodeTest.java   |   18 +-
 .../org/apache/ignite/client/ConnectionTest.java   |   31 +-
 .../apache/ignite/client/LocalIgniteCluster.java   |   14 +-
 .../apache/ignite/client/SslParametersTest.java    |    4 +-
 .../internal/client/thin/ComputeTaskTest.java      |   13 -
 .../internal/client/thin/ReliableChannelTest.java  |    9 +-
 .../internal/client/thin/TestTaskCustomType.java   |   47 -
 .../ThinClientAbstractPartitionAwarenessTest.java  |    9 +-
 ...lientPartitionAwarenessResourceReleaseTest.java |   14 +-
 ...ectByteBufferStreamImplV2ByteOrderSelfTest.java |   47 -
 .../encryption/AbstractEncryptionTest.java         |   15 -
 .../encryption/CacheGroupKeyChangeTest.java        |    6 +-
 .../encryption/CacheGroupReencryptionTest.java     |   16 +-
 .../encryption/EncryptedCacheNodeJoinTest.java     |  118 +-
 .../cache/CacheReadBeforeActivationTest.java       |  106 --
 .../GridCacheKeyCheckNearEnabledSelfTest.java}     |   17 +-
 .../cache/GridCacheKeyCheckSelfTest.java           |  209 ++++
 .../processors/cache/GridCacheUtilsSelfTest.java   |  199 +++
 .../cache/WalModeChangeAdvancedSelfTest.java       |   13 +-
 .../preloader/latch/ExchangeLatchManagerTest.java  |  106 --
 .../IgniteCacheExpiryPolicyAbstractTest.java       |  111 +-
 .../persistence/CheckpointReadLockFailureTest.java |   39 +-
 .../IgnitePdsCacheEntriesExpirationTest.java       |    4 +-
 .../IgnitePdsDefragmentationEncryptionTest.java    |   43 -
 ...itePdsDefragmentationRandomLruEvictionTest.java |   35 -
 .../persistence/IgnitePdsDefragmentationTest.java  |  593 ---------
 .../db/IgnitePdsDataRegionMetricsTest.java         |    7 -
 .../db/IgnitePdsReserveWalSegmentsTest.java        |  172 +--
 .../db/IgnitePdsStartWIthEmptyArchive.java         |    2 +-
 .../cache/persistence/db/IgnitePdsWithTtlTest.java |    4 +-
 .../db/checkpoint/LightweightCheckpointTest.java   |    4 +-
 .../db/wal/IgniteWalIteratorSwitchSegmentTest.java |  171 ++-
 .../db/wal/WalDeletionArchiveAbstractTest.java     |  113 +-
 .../db/wal/WriteAheadLogManagerSelfTest.java       |  175 ---
 .../defragmentation/DefragmentationMXBeanTest.java |  322 -----
 .../persistence/defragmentation/LinkMapTest.java   |   83 --
 .../cache/persistence/pagemem/NoOpWALManager.java  |    7 +-
 .../persistence/wal/aware/SegmentAwareTest.java    |  225 +---
 .../wal/memtracker/PageMemoryTracker.java          |    8 +-
 .../reader/StandaloneWalRecordsIteratorTest.java   |    5 -
 .../TxRecoveryOnCoordniatorFailTest.java           |  116 --
 .../metastorage/DistributedMetaStorageTest.java    |   16 -
 .../AbstractPerformanceStatisticsTest.java         |  194 ---
 .../performancestatistics/CacheStartTest.java      |  152 ---
 .../performancestatistics/ForwardReadTest.java     |  147 ---
 .../PerformanceStatisticsMultipleStartTest.java    |   69 -
 .../PerformanceStatisticsPropertiesTest.java       |  201 ---
 .../PerformanceStatisticsSelfTest.java             |  318 -----
 .../PerformanceStatisticsThinClientTest.java       |  244 ----
 .../performancestatistics/StringCacheTest.java     |   81 --
 .../performancestatistics/TopologyChangesTest.java |  137 --
 .../processors/query/DummyQueryIndexing.java       |   19 -
 ...ServiceHotRedeploymentViaDeploymentSpiTest.java |   93 +-
 .../processors/service/ServiceInfoSelfTest.java    |   23 -
 .../ignite/internal/util/BasicRateLimiterTest.java |   15 -
 .../ignite/internal/util/HostAndPortRangeTest.java |  181 ---
 .../ignite/internal/util/IgniteUtilsSelfTest.java  |   32 -
 .../ignite/platform/PlatformDeployServiceTask.java |  161 ---
 .../java/org/apache/ignite/platform/model/ACL.java |   23 -
 .../org/apache/ignite/platform/model/Account.java  |   71 --
 .../org/apache/ignite/platform/model/Address.java  |   47 -
 .../apache/ignite/platform/model/Department.java   |   34 -
 .../org/apache/ignite/platform/model/Employee.java |   47 -
 .../java/org/apache/ignite/platform/model/Key.java |   51 -
 .../org/apache/ignite/platform/model/Role.java     |   39 -
 .../org/apache/ignite/platform/model/User.java     |   67 -
 .../org/apache/ignite/platform/model/Value.java    |   51 -
 .../testframework/junits/GridAbstractTest.java     |    2 +-
 .../ignite/testsuites/IgniteBasicTestSuite.java    |    3 -
 .../IgniteBasicWithPersistenceTestSuite.java       |   18 +-
 .../ignite/testsuites/IgniteCacheTestSuite.java    |    5 +
 .../ignite/testsuites/IgniteCacheTestSuite9.java   |    5 -
 .../ignite/testsuites/IgnitePdsMvccTestSuite4.java |    8 -
 .../ignite/testsuites/IgnitePdsTestSuite2.java     |    3 -
 .../ignite/testsuites/IgnitePdsTestSuite4.java     |   10 -
 .../ignite/testsuites/IgniteUtilSelfTestSuite.java |    7 +-
 ...ridCommandHandlerClusterByClassTest_help.output |   30 -
 ...andHandlerClusterByClassWithSSLTest_help.output |   30 -
 .../processors/query/h2/H2SqlFieldMetadata.java    |   11 +-
 .../internal/processors/query/h2/H2Utils.java      |   16 +-
 .../processors/query/h2/IgniteH2Indexing.java      |   49 +-
 .../defragmentation/IndexingDefragmentation.java   |  482 -------
 .../query/h2/twostep/GridMapQueryExecutor.java     |   21 -
 .../query/h2/twostep/GridReduceQueryExecutor.java  |    4 -
 .../visor/verify/ValidateIndexesClosure.java       |  264 ++--
 .../visor/verify/ValidateIndexesContext.java       |   30 -
 .../visor/verify/VisorValidateIndexesTask.java     |   18 -
 .../IgniteCacheAbstractFieldsQuerySelfTest.java    |   49 +-
 .../cache/IgniteCacheUpdateSqlQuerySelfTest.java   |    4 +-
 .../processors/cache/index/H2RowCacheSelfTest.java |   39 +-
 .../IgnitePdsIndexingDefragmentationTest.java      |  323 -----
 ...xingMultithreadedLoadContinuousRestartTest.java |    4 +-
 .../IgniteClusterSnapshotWithIndexesTest.java      |    3 +-
 .../processors/database/RebuildIndexTest.java      |    2 +-
 .../RebuildIndexWithHistoricalRebalanceTest.java   |    2 +-
 .../PerformanceStatisticsQueryTest.java            |  356 ------
 .../query/IgniteSqlSplitterSelfTest.java           |    3 -
 .../processors/query/SqlResultSetMetaSelfTest.java |   84 --
 .../IgniteBinaryCacheQueryTestSuite.java           |    3 -
 .../testsuites/IgniteCacheQuerySelfTestSuite6.java |    4 +-
 .../testsuites/IgnitePdsWithIndexingTestSuite.java |    4 +-
 modules/ml/catboost-model-parser/pom.xml           |   88 --
 .../ml/catboost/CatboostClassificationModel.java   |   73 --
 .../CatboostClassificationModelParser.java         |   44 -
 .../ml/catboost/CatboostRegressionModel.java       |   71 --
 .../ml/catboost/CatboostRegressionModelParser.java |   44 -
 .../apache/ignite/ml/catboost/package-info.java    |   23 -
 .../ml/catboost/IgniteMLCatboostTestSuite.java     |   33 -
 .../CatboostClassificationModelParserTest.java     |   74 --
 .../parser/CatboostRegressionModelParserTest.java  |   76 --
 ...ee-access-challenge-sample-expected-results.csv |    4 -
 .../amazon-employee-access-challenge-sample.csv    |    5 -
 .../datasets/amazon-employee-access-challenge.csv  | 1000 ---------------
 .../src/test/resources/fit_script/.gitignore       |    2 -
 .../src/test/resources/fit_script/README.md        |   19 -
 .../src/test/resources/fit_script/install.txt      |    3 -
 .../src/test/resources/fit_script/train_clf.py     |   71 --
 .../src/test/resources/fit_script/train_reg.py     |   68 -
 .../src/test/resources/models/model_clf.cbm        |  Bin 1089896 -> 0 bytes
 .../src/test/resources/models/model_reg.cbm        |  Bin 1089824 -> 0 bytes
 modules/ml/pom.xml                                 |    5 -
 .../ml/sparkmodelparser/SparkModelParser.java      |   81 +-
 .../apache/ignite/ml/clustering/gmm/GmmModel.java  |    6 -
 .../ml/clustering/kmeans/ClusterizationModel.java  |    4 +-
 .../ignite/ml/clustering/kmeans/KMeansModel.java   |  125 +-
 .../ignite/ml/clustering/kmeans/KMeansTrainer.java |    4 +-
 .../ignite/ml/composition/ModelsComposition.java   |   16 +-
 .../ml/composition/ModelsCompositionFormat.java    |    6 +-
 .../composition/boosting/GDBLearningStrategy.java  |    4 +-
 .../ignite/ml/composition/boosting/GDBModel.java   |  118 --
 .../ignite/ml/composition/boosting/GDBTrainer.java |   43 +-
 .../PredictionsAggregator.java                     |    9 -
 .../WeightedPredictionsAggregator.java             |    7 +-
 .../apache/ignite/ml/inference/json/JSONModel.java |   55 -
 .../ignite/ml/inference/json/JSONModelMixIn.java   |   31 -
 .../ignite/ml/inference/json/JSONWritable.java     |   37 -
 .../ignite/ml/inference/json/JacksonHelper.java    |   39 -
 .../ignite/ml/knn/NNClassificationModel.java       |   11 -
 .../ignite/ml/knn/ann/ANNClassificationModel.java  |  130 +-
 .../ml/knn/ann/ANNClassificationTrainer.java       |   14 +-
 .../apache/ignite/ml/knn/ann/ProbableLabel.java    |    5 +-
 .../ml/math/distances/BrayCurtisDistance.java      |    4 -
 .../ignite/ml/math/distances/DistanceMeasure.java  |   17 -
 .../ml/math/distances/MinkowskiDistance.java       |   16 +-
 .../math/distances/WeightedMinkowskiDistance.java  |   35 +-
 .../ignite/ml/math/stat/DistributionMixture.java   |    9 +-
 .../compound/CompoundNaiveBayesModel.java          |   73 +-
 .../discrete/DiscreteNaiveBayesModel.java          |   83 +-
 .../discrete/DiscreteNaiveBayesSumsHolder.java     |   11 -
 .../gaussian/GaussianNaiveBayesModel.java          |   75 +-
 .../gaussian/GaussianNaiveBayesSumsHolder.java     |   15 -
 .../encoding/EncoderPartitionData.java             |   19 -
 .../ml/preprocessing/encoding/EncoderTrainer.java  |  228 +---
 .../ml/preprocessing/encoding/EncoderType.java     |    5 +-
 .../encoding/target/TargetCounter.java             |   78 --
 .../encoding/target/TargetEncoderPreprocessor.java |   99 --
 .../encoding/target/TargetEncodingMeta.java        |   56 -
 .../encoding/target/package-info.java              |   22 -
 .../linear/LinearRegressionLSQRTrainer.java        |    8 +-
 .../regressions/linear/LinearRegressionModel.java  |  114 +-
 .../linear/LinearRegressionSGDTrainer.java         |    4 +-
 .../logistic/LogisticRegressionModel.java          |  112 +-
 .../apache/ignite/ml/structures/DatasetRow.java    |    4 -
 .../apache/ignite/ml/structures/LabeledVector.java |    4 -
 .../ml/svm/SVMLinearClassificationModel.java       |  112 +-
 .../ml/svm/SVMLinearClassificationTrainer.java     |    2 +-
 ...{DecisionTreeTrainer.java => DecisionTree.java} |   20 +-
 .../ml/tree/DecisionTreeClassificationTrainer.java |    2 +-
 .../ml/tree/DecisionTreeConditionalNode.java       |   16 +-
 .../ignite/ml/tree/DecisionTreeLeafNode.java       |   10 +-
 .../apache/ignite/ml/tree/DecisionTreeModel.java   |  111 --
 .../apache/ignite/ml/tree/DecisionTreeNode.java    |   15 +-
 .../ml/tree/DecisionTreeRegressionTrainer.java     |    2 +-
 .../java/org/apache/ignite/ml/tree/NodeData.java   |   90 --
 .../tree/boosting/GDBOnTreesLearningStrategy.java  |   10 +-
 .../RandomForestClassifierTrainer.java             |    7 +-
 .../ml/tree/randomforest/RandomForestModel.java    |  106 --
 .../RandomForestRegressionTrainer.java             |    7 +-
 .../ml/tree/randomforest/RandomForestTrainer.java  |   37 +-
 .../ignite/ml/tree/randomforest/data/NodeId.java   |   11 +-
 .../ml/tree/randomforest/data/NodeSplit.java       |    9 +-
 .../ignite/ml/tree/randomforest/data/TreeNode.java |    9 +-
 .../{RandomForestTreeModel.java => TreeRoot.java}  |   25 +-
 .../data/impurity/ImpurityHistogramsComputer.java  |    8 +-
 .../data/statistics/LeafValuesComputer.java        |    8 +-
 .../ignite/ml/clustering/KMeansModelTest.java      |    4 +-
 .../apache/ignite/ml/common/KeepBinaryTest.java    |    2 +-
 .../ml/composition/boosting/GDBTrainerTest.java    |    6 +-
 .../ignite/ml/math/distances/DistanceTest.java     |    6 +-
 .../distances/WeightedMinkowskiDistanceTest.java   |   10 +-
 .../encoding/TargetEncoderPreprocessorTest.java    |  102 --
 .../linear/LinearRegressionLSQRTrainerTest.java    |   16 +-
 .../linear/LinearRegressionSGDTrainerTest.java     |   16 +-
 .../ml/selection/cv/CrossValidationTest.java       |    8 +-
 ...onTreeClassificationTrainerIntegrationTest.java |    7 +-
 .../DecisionTreeClassificationTrainerTest.java     |    6 +-
 ...cisionTreeRegressionTrainerIntegrationTest.java |    8 +-
 .../ml/tree/DecisionTreeRegressionTrainerTest.java |    6 +-
 .../RandomForestClassifierTrainerTest.java         |   13 +-
 .../randomforest/RandomForestIntegrationTest.java  |    3 +-
 .../RandomForestRegressionTrainerTest.java         |    9 +-
 .../ml/tree/randomforest/data/TreeNodeTest.java    |   14 +-
 modules/opencensus/pom.xml                         |   23 -
 .../cpp/core-test/config/affinity-test-32.xml      |   52 -
 .../cpp/core-test/config/affinity-test-default.xml |   79 --
 .../cpp/core-test/config/affinity-test.xml         |   34 -
 .../cpp/core-test/config/cache-test-default.xml    |    4 +
 .../cpp/core-test/include/ignite/test_utils.h      |   30 -
 .../platforms/cpp/core-test/src/affinity_test.cpp  |  123 +-
 .../platforms/cpp/core-test/src/compute_test.cpp   |  363 ++----
 modules/platforms/cpp/core-test/src/test_utils.cpp |   36 +-
 modules/platforms/cpp/core/namespaces.dox          |    4 +-
 .../platforms/cpp/odbc-test/include/test_utils.h   |    6 +-
 .../cpp/odbc-test/src/attributes_test.cpp          |   44 -
 .../cpp/odbc-test/src/authentication_test.cpp      |   91 +-
 .../cpp/odbc-test/src/meta_queries_test.cpp        |  260 ----
 .../platforms/cpp/odbc-test/src/queries_test.cpp   |   38 +-
 modules/platforms/cpp/odbc-test/src/test_utils.cpp |   42 +-
 .../platforms/cpp/odbc-test/src/utility_test.cpp   |   64 -
 .../ignite/odbc/config/connection_string_parser.h  |    6 -
 .../cpp/odbc/include/ignite/odbc/dsn_config.h      |    5 +-
 .../odbc/include/ignite/odbc/meta/column_meta.h    |   41 +-
 .../odbc/include/ignite/odbc/protocol_version.h    |    3 -
 .../platforms/cpp/odbc/os/win/src/system_dsn.cpp   |    4 +-
 .../odbc/src/config/connection_string_parser.cpp   |   18 +-
 modules/platforms/cpp/odbc/src/connection.cpp      |    2 +-
 modules/platforms/cpp/odbc/src/cursor.cpp          |    7 +-
 modules/platforms/cpp/odbc/src/dsn_config.cpp      |   12 +-
 .../platforms/cpp/odbc/src/meta/column_meta.cpp    |   30 +-
 modules/platforms/cpp/odbc/src/odbc.cpp            |    5 +-
 .../platforms/cpp/odbc/src/protocol_version.cpp    |    6 +-
 .../platforms/cpp/odbc/src/query/data_query.cpp    |    6 +-
 modules/platforms/cpp/odbc/src/statement.cpp       |    2 -
 modules/platforms/cpp/odbc/src/utility.cpp         |   14 +-
 .../cpp/thin-client-test/src/test_utils.cpp        |   34 +-
 .../Apache.Ignite.Core.Tests.csproj                |    6 +-
 .../Binary/BinaryDateTimeTest.cs                   |  213 +---
 .../Cache/Affinity/AffinityBackupFilterTest.cs     |  132 --
 .../Cache/Affinity/AffinityFunctionSpringTest.cs   |   20 +-
 .../Cache/Affinity/AffinityFunctionTest.cs         |  128 +-
 .../Cache/CacheAbstractTest.cs                     |   39 -
 .../Cache/CacheConfigurationTest.cs                |   47 +-
 .../Client/Cache/CacheTestNoMeta.cs                |    2 +-
 .../Config/Cache/Affinity/affinity-function.xml    |   18 -
 .../Config/full-config.xml                         |   31 +-
 .../Config/spring-test.xml                         |    1 -
 .../IgniteConfigurationSerializerTest.cs           |   12 +-
 .../IgniteConfigurationTest.cs                     |   27 +-
 .../Services/IJavaService.cs                       |   32 -
 .../Services/JavaServiceDynamicProxy.cs            |  374 ------
 .../Apache.Ignite.Core.Tests/Services/Model.cs     |  147 ---
 .../Services/ServiceProxyTest.cs                   |   41 +-
 .../Services/ServiceTypeAutoResolveTest.cs         |  213 ----
 .../Services/ServicesTest.cs                       |   84 +-
 .../Apache.Ignite.Core/Apache.Ignite.Core.csproj   |    3 -
 .../Binary/BinaryConfiguration.cs                  |   38 +-
 .../Binary/BinaryReflectiveSerializer.cs           |   22 +-
 .../Binary/ITimestampConverter.cs                  |   38 -
 .../Cache/Affinity/IAffinityBackupFilter.cs        |   34 -
 .../ClusterNodeAttributeAffinityBackupFilter.cs    |   57 -
 .../Rendezvous/RendezvousAffinityFunction.cs       |   16 +-
 .../IgniteClientConfigurationSection.xsd           |   17 -
 .../Apache.Ignite.Core/IgniteConfiguration.cs      |   24 +-
 .../IgniteConfigurationSection.xsd                 |   48 -
 .../Impl/Binary/BinaryProcessor.cs                 |   43 +-
 .../Impl/Binary/BinaryProcessorClient.cs           |   23 +-
 .../Apache.Ignite.Core/Impl/Binary/BinaryReader.cs |    8 +-
 .../Impl/Binary/BinarySystemHandlers.cs            |   56 +-
 .../Apache.Ignite.Core/Impl/Binary/BinaryUtils.cs  |   30 +-
 .../Apache.Ignite.Core/Impl/Binary/BinaryWriter.cs |   12 +-
 .../Impl/Binary/IBinaryProcessor.cs                |    3 +-
 .../Apache.Ignite.Core/Impl/Binary/Marshaller.cs   |   45 +-
 .../Cache/Affinity/AffinityFunctionSerializer.cs   |   81 +-
 .../Impl/PlatformTargetAdapter.cs                  |    4 +-
 .../Impl/Services/ServiceProxySerializer.cs        |   11 +-
 .../Apache.Ignite.Core/Impl/Services/Services.cs   |   18 +-
 .../transactions/proxy/ClientTransactionProxy.java |   60 -
 .../proxy/ClientTransactionProxyFactory.java       |   61 -
 .../transactions/proxy/IgniteTransactionProxy.java |   60 -
 .../proxy/IgniteTransactionProxyFactory.java       |   62 -
 .../transactions/proxy/TransactionProxy.java       |   41 -
 .../proxy/TransactionProxyFactory.java             |   27 -
 .../spring/AbstractSpringTransactionManager.java   |  309 -----
 .../IgniteClientSpringTransactionManager.java      |  117 --
 .../spring/IgniteTransactionHolder.java            |    9 +-
 .../spring/SpringTransactionManager.java           |  288 ++++-
 .../src/test/config/enc/enc-cache-client.xml       |    2 +-
 .../ignite/testsuites/IgniteSpringTestSuite.java   |    2 -
 .../GridSpringTransactionManagerAbstractTest.java  |    4 +-
 .../GridSpringTransactionManagerSelfTest.java      |    7 +-
 ...SpringTransactionManagerSpringBeanSelfTest.java |    7 +-
 .../spring/GridSpringTransactionService.java       |   91 +-
 .../IgniteClientSpringTransactionManagerTest.java  |  118 --
 packaging/deb/control                              |    2 +-
 parent/pom.xml                                     |    8 -
 pom.xml                                            |    1 -
 563 files changed, 4312 insertions(+), 33927 deletions(-)

diff --git a/assembly/dependencies-apache-ignite-slim.xml b/assembly/dependencies-apache-ignite-slim.xml
index 6d222c7..e98695c 100644
--- a/assembly/dependencies-apache-ignite-slim.xml
+++ b/assembly/dependencies-apache-ignite-slim.xml
@@ -156,7 +156,6 @@
                 <exclude>org.apache.ignite:ignite-ml-h2o-model-parser</exclude>
                 <exclude>org.apache.ignite:ignite-ml-spark-model-parser</exclude>
                 <exclude>org.apache.ignite:ignite-ml-xgboost-model-parser</exclude>
-                <exclude>org.apache.ignite:ignite-ml-catboost-model-parser</exclude>
                 <exclude>org.apache.ignite:ignite-osgi</exclude>
                 <exclude>org.apache.ignite:ignite-osgi-karaf</exclude>
                 <exclude>org.apache.ignite:ignite-osgi-paxlogging</exclude>
diff --git a/docs/_config.yml b/docs/_config.yml
index 0562d1a..00c4e79 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -14,7 +14,7 @@
 # limitations under the License.
 exclude: [guidelines.md,  "Gemfile", "Gemfile.lock", README.adoc, "_docs/code-snippets", "_docs/includes", '*.sh']
 attrs: &asciidoc_attributes
-  version: 2.9.1
+  version: 2.9.0 
   base_url: /docs
   stylesdir: /docs/assets/css
   imagesdir: /docs
diff --git a/docs/_docs/extensions-and-integrations/mybatis-l2-cache.adoc b/docs/_docs/extensions-and-integrations/mybatis-l2-cache.adoc
index bdbc81a..8c3e73b 100644
--- a/docs/_docs/extensions-and-integrations/mybatis-l2-cache.adoc
+++ b/docs/_docs/extensions-and-integrations/mybatis-l2-cache.adoc
@@ -52,4 +52,4 @@ tab:XML[]
 --
 
 and configure your Ignite cache in `config/default-config.xml`. (Simple reference configurations are available on
-https://github.com/mybatis/ignite-cache/tree/master/config[GitHub, window=_blank])
+https://github.com/mybatis/ignite-cache/tree/master/config[Github, window=_blank])
diff --git a/docs/_docs/index.adoc b/docs/_docs/index.adoc
index 1e8aadc..2a3ceb6 100644
--- a/docs/_docs/index.adoc
+++ b/docs/_docs/index.adoc
@@ -14,40 +14,20 @@
 // limitations under the License.
 = Apache Ignite Documentation
 
-Apache Ignite is a distributed database for in-memory speed at petabyte scale.
+Apache Ignite is a horizontally scalable, fault-tolerant distributed in-memory computing platform for building real-time
+applications that can process terabytes of data with in-memory speed.
 
-The technical documentation introduces you to the key capabilities, shows how to use certain features, or how to
+Ignite documentation introduces you to the project's main capabilities, shows how to use certain features, or how to
 approach cluster optimizations and issues troubleshooting. If you are new to Ignite, then start with the
-link:quick-start/java[Quick Start Guides], and build the first application in a matter of 5-10 minutes.
+link:docs/latest/quick-start/java[Quick Start Guides],  and build the first application in a matter of 5-10 minutes.
 Otherwise, select the topic of your interest and have your problems solved, and questions answered.
 Good luck with your Ignite journey!
 
-== APIs
-
-API reference for various programming languages.
-
-*Latest Stable Version*
-
-* link:/releases/latest/javadoc/[JavaDoc]
-* link:/releases/latest/dotnetdoc/api/[C#/.NET]
-* link:/releases/latest/cppdoc/[C++]
-* link:/releases/latest/scaladoc/scalar/index.html[Scala]
-
-*Older Versions*
-
-* With the top-level navigation menu, change an Ignite version and select a version-specific API from the APIs drop-down list.
-* Or, go to the link:/download.cgi[downloads page] for a full archive of the versions.
+== Programming Languages
+include::includes/intro-languages.adoc[]
 
-== Examples
+== Ignite Examples
 
 The Apache Ignite github repository contains a number of runnable examples that illustrate various Ignite functionality.
-
-* link:{githubUrl}/examples[Java^]
-* link:{githubUrl}/modules/platforms/dotnet/examples[C#/.NET^]
-* link:{githubUrl}/modules/platforms/cpp/examples[C++^]
-* link:{githubUrl}/modules/platforms/python/examples[Python^]
-* link:{githubUrl}/modules/platforms/nodejs/examples[Node.JS^]
-* link:{githubUrl}/modules/platforms/php/examples[PHP^]
-
-== Programming Languages
-include::includes/intro-languages.adoc[]
+Refer to the link:{githubUrl}/examples[Examples^] folder in the Ignite code base.
+The examples are also available in the binary distribution.
diff --git a/docs/_docs/machine-learning/binary-classification/decision-trees.adoc b/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
index bc9ff05..57ab7bf 100644
--- a/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
+++ b/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
@@ -39,12 +39,12 @@ The model works this way - the split process stops when either the algorithm has
 
 == Model
 
-The Model in a decision tree classification is represented by the class `DecisionTreeModel`. We can make a prediction for a given vector of features in the following way:
+The Model in a decision tree classification is represented by the class `DecisionTreeNode`. We can make a prediction for a given vector of features in the following way:
 
 
 [source, java]
 ----
-DecisionTreeModel mdl = ...;
+DecisionTreeNode mdl = ...;
 
 double prediction = mdl.apply(observation);
 ----
@@ -68,7 +68,7 @@ DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTraine
 );
 
 // Train model.
-DecisionTreeModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+DecisionTreeNode mdl = trainer.fit(ignite, dataCache, vectorizer);
 ----
 
 
diff --git a/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc b/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
index 065cb78..92992f8 100644
--- a/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
+++ b/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
@@ -71,7 +71,7 @@ To load in Ignite ML you should use SparkModelParser class via method parse() ca
 
 [source, java]
 ----
-DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
+DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
    SPARK_MDL_PATH,
    SupportedSparkModels.DECISION_TREE
 );
diff --git a/docs/_docs/machine-learning/model-selection/cross-validation.adoc b/docs/_docs/machine-learning/model-selection/cross-validation.adoc
index 39e00f1..8e64c68 100644
--- a/docs/_docs/machine-learning/model-selection/cross-validation.adoc
+++ b/docs/_docs/machine-learning/model-selection/cross-validation.adoc
@@ -27,7 +27,7 @@ Let’s imagine that we have a trainer, a training set and we want to make cross
 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
 
 // Create cross-validation instance
-CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
   = new CrossValidation<>();
 
 // Set up the cross-validation process
@@ -67,7 +67,7 @@ Pipeline<Integer, Vector, Integer, Double> pipeline
 
 
 // Create cross-validation instance
-CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
   = new CrossValidation<>();
 
 // Set up the cross-validation process
diff --git a/docs/_docs/machine-learning/model-selection/pipeline-api.adoc b/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
index 9b2798c..7f0cb93 100644
--- a/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
+++ b/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
@@ -64,7 +64,7 @@ Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrain
 
 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
+CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator = new CrossValidation<>();
 
 ParamGrid paramGrid = new ParamGrid()
   .addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 10.0})
@@ -101,7 +101,7 @@ Pipeline<Integer, Vector, Integer, Double> pipeline = new Pipeline<Integer, Vect
   .addPreprocessingTrainer(new MinMaxScalerTrainer<Integer, Vector>())
   .addTrainer(trainer);
 
-CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
+CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator = new CrossValidation<>();
 
 ParamGrid paramGrid = new ParamGrid()
   .addHyperParam("maxDeep", trainer::withMaxDeep, new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 10.0})
diff --git a/docs/_docs/machine-learning/regression/decision-trees-regression.adoc b/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
index 2abbaa8..48f9d5c 100644
--- a/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
+++ b/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
@@ -39,12 +39,12 @@ The model works this way - the split process stops when either the algorithm has
 
 == Model
 
-The Model in a decision tree classification is represented by the class `DecisionTreeModel`. We can make a prediction for a given vector of features in the following way:
+The Model in a decision tree classification is represented by the class `DecisionTreeNode`. We can make a prediction for a given vector of features in the following way:
 
 
 [source, java]
 ----
-DecisionTreeModel mdl = ...;
+DecisionTreeNode mdl = ...;
 
 double prediction = mdl.apply(observation);
 ----
@@ -67,7 +67,7 @@ DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(
 );
 
 // Train model.
-DecisionTreeModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+DecisionTreeNode mdl = trainer.fit(ignite, dataCache, vectorizer);
 ----
 
 == Examples
diff --git a/docs/_docs/monitoring-metrics/system-views.adoc b/docs/_docs/monitoring-metrics/system-views.adoc
index ac45667..1d400c6 100644
--- a/docs/_docs/monitoring-metrics/system-views.adoc
+++ b/docs/_docs/monitoring-metrics/system-views.adoc
@@ -676,30 +676,3 @@ This view exposes information about the distribution of cache group partitions a
 |STATE | string | Partition state. Possible states: MOVING - partition is being loaded from another node to this node; OWNING - this node is either a primary or backup owner; RENTING - this node is neither primary nor back up owner (is being currently evicted); EVICTED - partition has been evicted; LOST - partition state is invalid, the partition should not be used.
 |IS_PRIMARY | boolean  | Primary partition flag
 |===
-
-== BINARY_METADATA
-
-This view exposes information about all available binary types.
-
-[{table_opts}]
-|===
-|Column | Data type |  Description
-|TYPE_ID | int | Type ID
-|TYPE_NAME | string | Type name
-|AFF_KEY_FIELD_NAME | string | Affinity key field name
-|FIELDS_COUNT | int | Fields count
-|FIELDS | string | Recorded object fields
-|SCHEMAS_IDS | string | Schema IDs registered for this type
-|IS_ENUM | boolean | Whether this is enum type
-|===
-
-== METASTORAGE
-
-This view exposes the contents of the metastorage cache.
-
-[{table_opts}]
-|===
-|Column | Data type |  Description
-|NAME | string | Name
-|VALUE | string | String or raw binary (if data could not be deserialized for some reason) representation of an element
-|===
diff --git a/examples/pom-standalone-lgpl.xml b/examples/pom-standalone-lgpl.xml
index a21776d..6d2fe41 100644
--- a/examples/pom-standalone-lgpl.xml
+++ b/examples/pom-standalone-lgpl.xml
@@ -112,12 +112,6 @@
 
         <dependency>
             <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-ml-catboost-model-parser</artifactId>
-            <version>to_be_replaced_by_ignite_version</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
             <artifactId>ignite-ml-spark-model-parser</artifactId>
             <version>to_be_replaced_by_ignite_version</version>
         </dependency>
diff --git a/examples/pom-standalone.xml b/examples/pom-standalone.xml
index ccce355..ca1d097 100644
--- a/examples/pom-standalone.xml
+++ b/examples/pom-standalone.xml
@@ -112,12 +112,6 @@
 
         <dependency>
             <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-ml-catboost-model-parser</artifactId>
-            <version>to_be_replaced_by_ignite_version</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
             <artifactId>ignite-ml-spark-model-parser</artifactId>
             <version>to_be_replaced_by_ignite_version</version>
         </dependency>
diff --git a/examples/pom.xml b/examples/pom.xml
index 62087ed..08fe50a 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -106,12 +106,6 @@
             <groupId>org.apache.ignite</groupId>
             <artifactId>ignite-ml</artifactId>
             <version>${project.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
         </dependency>
 
         <dependency>
@@ -122,12 +116,6 @@
 
         <dependency>
             <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-ml-catboost-model-parser</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
             <artifactId>ignite-ml-h2o-model-parser</artifactId>
             <version>${project.version}</version>
         </dependency>
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
index 3127418..beee4f6 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
@@ -73,8 +73,8 @@ public class KMeansClusterizationExample {
                 );
 
                 System.out.println(">>> KMeans centroids");
-                Tracer.showAscii(mdl.centers()[0]);
-                Tracer.showAscii(mdl.centers()[1]);
+                Tracer.showAscii(mdl.getCenters()[0]);
+                Tracer.showAscii(mdl.getCenters()[1]);
                 System.out.println(">>>");
 
                 System.out.println(">>> --------------------------------------------");
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostClassificationModelParserExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostClassificationModelParserExample.java
deleted file mode 100644
index e6f9f65..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostClassificationModelParserExample.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.catboost;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.HashMap;
-import java.util.Scanner;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.catboost.CatboostClassificationModelParser;
-import org.apache.ignite.ml.inference.Model;
-import org.apache.ignite.ml.inference.builder.AsyncModelBuilder;
-import org.apache.ignite.ml.inference.builder.IgniteDistributedModelBuilder;
-import org.apache.ignite.ml.inference.reader.FileSystemModelReader;
-import org.apache.ignite.ml.inference.reader.ModelReader;
-import org.apache.ignite.ml.math.primitives.vector.NamedVector;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-
-/**
- * This example demonstrates how to import Catboost model and use imported model for distributed inference in Apache
- * Ignite.
- */
-public class CatboostClassificationModelParserExample {
-    /**
-     * Test model resource name.
-     */
-    private static final String TEST_MODEL_RES = "examples/src/main/resources/models/catboost/model_clf.cbm";
-
-    /**
-     * Test data.
-     */
-    private static final String TEST_DATA_RES = "examples/src/main/resources/datasets/amazon-employee-access-challenge-sample.csv";
-
-    /**
-     * Test expected results.
-     */
-    private static final String TEST_ER_RES = "examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv";
-
-    /**
-     * Parser.
-     */
-    private static final CatboostClassificationModelParser parser = new CatboostClassificationModelParser();
-
-    /**
-     * Run example.
-     */
-    public static void main(String... args) throws ExecutionException, InterruptedException,
-        FileNotFoundException {
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            File mdlRsrc = IgniteUtils.resolveIgnitePath(TEST_MODEL_RES);
-            if (mdlRsrc == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_MODEL_RES + "]");
-
-            ModelReader reader = new FileSystemModelReader(mdlRsrc.getPath());
-
-            AsyncModelBuilder mdlBuilder = new IgniteDistributedModelBuilder(ignite, 4, 4);
-
-            File testData = IgniteUtils.resolveIgnitePath(TEST_DATA_RES);
-            if (testData == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_DATA_RES + "]");
-
-            File testExpRes = IgniteUtils.resolveIgnitePath(TEST_ER_RES);
-            if (testExpRes == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_ER_RES + "]");
-
-            try (Model<NamedVector, Future<Double>> mdl = mdlBuilder.build(reader, parser);
-                 Scanner testDataScanner = new Scanner(testData);
-                 Scanner testExpResultsScanner = new Scanner(testExpRes)) {
-                String header = testDataScanner.nextLine();
-                String[] columns = header.split(",");
-
-                while (testDataScanner.hasNextLine()) {
-                    String testDataStr = testDataScanner.nextLine();
-                    String testExpResultsStr = testExpResultsScanner.nextLine();
-
-                    HashMap<String, Double> testObj = new HashMap<>();
-                    String[] values = testDataStr.split(",");
-
-                    for (int i = 0; i < columns.length; i++) {
-                      testObj.put(columns[i], Double.valueOf(values[i]));
-                    }
-
-                    double prediction = mdl.predict(VectorUtils.of(testObj)).get();
-                    double expPrediction = Double.parseDouble(testExpResultsStr);
-
-                    System.out.println("Expected: " + expPrediction + ", prediction: " + prediction);
-                }
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-  }
-}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostRegressionModelParserExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostRegressionModelParserExample.java
deleted file mode 100644
index 3e5e258..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/catboost/CatboostRegressionModelParserExample.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.catboost;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.HashMap;
-import java.util.Scanner;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.internal.util.IgniteUtils;
-import org.apache.ignite.ml.catboost.CatboostRegressionModelParser;
-import org.apache.ignite.ml.inference.Model;
-import org.apache.ignite.ml.inference.builder.AsyncModelBuilder;
-import org.apache.ignite.ml.inference.builder.IgniteDistributedModelBuilder;
-import org.apache.ignite.ml.inference.reader.FileSystemModelReader;
-import org.apache.ignite.ml.inference.reader.ModelReader;
-import org.apache.ignite.ml.math.primitives.vector.NamedVector;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-
-/**
- * This example demonstrates how to import a Catboost model and use the imported model for distributed inference in
- * Apache Ignite.
- */
-public class CatboostRegressionModelParserExample {
-    /**
-     * Test model resource name.
-     */
-    private static final String TEST_MODEL_RES = "examples/src/main/resources/models/catboost/model_reg.cbm";
-
-    /**
-     * Test data.
-     */
-    private static final String TEST_DATA_RES = "examples/src/main/resources/datasets/boston_housing_dataset.txt";
-
-    /**
-     * Test expected results.
-     */
-    private static final String TEST_ER_RES = "examples/src/main/resources/datasets/boston_housing_dataset-catboost-expected-results.txt";
-
-    /**
-     * Parser.
-     */
-    private static final CatboostRegressionModelParser parser = new CatboostRegressionModelParser();
-
-    /**
-     * Run example.
-     */
-    public static void main(String... args) throws ExecutionException, InterruptedException,
-        FileNotFoundException {
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            File mdlRsrc = IgniteUtils.resolveIgnitePath(TEST_MODEL_RES);
-            if (mdlRsrc == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_MODEL_RES + "]");
-
-            ModelReader reader = new FileSystemModelReader(mdlRsrc.getPath());
-            AsyncModelBuilder mdlBuilder = new IgniteDistributedModelBuilder(ignite, 4, 4);
-
-            File testData = IgniteUtils.resolveIgnitePath(TEST_DATA_RES);
-            if (testData == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_DATA_RES + "]");
-
-            File testExpRes = IgniteUtils.resolveIgnitePath(TEST_ER_RES);
-            if (testExpRes == null)
-                throw new IllegalArgumentException("File not found [resource_path=" + TEST_ER_RES + "]");
-
-            try (Model<NamedVector, Future<Double>> mdl = mdlBuilder.build(reader, parser);
-                 Scanner testDataScanner = new Scanner(testData);
-                 Scanner testExpResultsScanner = new Scanner(testExpRes)) {
-                String[] columns = new String[]{
-                    "f_0",
-                    "f_1",
-                    "f_2",
-                    "f_3",
-                    "f_4",
-                    "f_5",
-                    "f_6",
-                    "f_7",
-                    "f_8",
-                    "f_9",
-                    "f_10",
-                    "f_11",
-                    "f_12",
-                };
-
-                while (testDataScanner.hasNextLine()) {
-                    String testDataStr = testDataScanner.nextLine();
-                    String testExpResultsStr = testExpResultsScanner.nextLine();
-
-                    HashMap<String, Double> testObj = new HashMap<>();
-                    String[] values = testDataStr.split(",");
-
-                    for (int i = 0; i < columns.length; i++) {
-                        testObj.put(columns[i], Double.valueOf(values[i]));
-                    }
-
-                    double prediction = mdl.predict(VectorUtils.of(testObj)).get();
-                    double expPrediction = Double.parseDouble(testExpResultsStr);
-
-                    System.out.println("Expected: " + expPrediction + ", prediction: " + prediction);
-                }
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
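
The regression example differs from the classification one only in the parser and the hard-coded feature names, so the part worth isolating is how a positional CSV row becomes the named vector the model consumes. A minimal sketch with placeholder values (a subset of the f_0..f_12 names used above):

    // Sketch: build a NamedVector from positional values.
    String[] columns = {"f_0", "f_1", "f_2"};          // placeholder subset of the feature names
    double[] values = {0.1, 2.0, 3.5};                 // placeholder values
    HashMap<String, Double> row = new HashMap<>();
    for (int i = 0; i < columns.length; i++)
        row.put(columns[i], values[i]);
    NamedVector vec = VectorUtils.of(row);             // passed to Model#predict as in the example
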
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java
deleted file mode 100644
index 618e4c6..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.UUID;
-import javax.cache.Cache;
-import org.apache.commons.math3.util.Precision;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
-import org.apache.ignite.cache.query.QueryCursor;
-import org.apache.ignite.cache.query.ScanQuery;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
-import org.apache.ignite.ml.knn.NNClassificationModel;
-import org.apache.ignite.ml.knn.ann.ANNClassificationModel;
-import org.apache.ignite.ml.knn.ann.ANNClassificationTrainer;
-import org.apache.ignite.ml.math.distances.EuclideanDistance;
-import org.apache.ignite.ml.math.distances.ManhattanDistance;
-import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
-
-/**
- * Run ANN multi-class classification trainer ({@link ANNClassificationTrainer}) over distributed dataset.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://en.wikipedia.org/wiki/Iris_flower_data_set"></a>Iris dataset</a>).</p>
- * <p>
- * After that it trains the model based on the specified data using
- * <a href="https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm">kNN</a> algorithm.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict which cluster
- * each point belongs to, and compares the prediction to the expected outcome (ground truth).</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class ANNClassificationExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> ANN multi-class classification algorithm over cached dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, double[]> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = getTestCache(ignite);
-
-                ANNClassificationTrainer trainer = new ANNClassificationTrainer()
-                    .withDistance(new ManhattanDistance())
-                    .withK(50)
-                    .withMaxIterations(1000)
-                    .withEpsilon(1e-2);
-
-                ANNClassificationModel mdl = (ANNClassificationModel) trainer.fit(
-                    ignite,
-                    dataCache,
-                    new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST)
-                ).withK(5)
-                    .withDistanceMeasure(new EuclideanDistance())
-                    .withWeighted(true);
-
-                System.out.println("\n>>> Exported ANN model: " + mdl.toString(true));
-
-                double accuracy = evaluateModel(dataCache, mdl);
-
-                System.out.println("\n>>> Accuracy for exported ANN model:" + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                ANNClassificationModel modelImportedFromJSON = ANNClassificationModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported ANN model: " + modelImportedFromJSON.toString(true));
-
-                accuracy = evaluateModel(dataCache, modelImportedFromJSON);
-
-                System.out.println("\n>>> Accuracy for imported ANN model:" + accuracy);
-
-                System.out.println(">>> ANN multi-class classification algorithm over cached dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static double evaluateModel(IgniteCache<Integer, double[]> dataCache, NNClassificationModel knnMdl) {
-        int amountOfErrors = 0;
-        int totalAmount = 0;
-
-        double accuracy;
-        try (QueryCursor<Cache.Entry<Integer, double[]>> observations = dataCache.query(new ScanQuery<>())) {
-            System.out.println(">>> ---------------------------------");
-            System.out.println(">>> | Prediction\t| Ground Truth\t|");
-            System.out.println(">>> ---------------------------------");
-
-            for (Cache.Entry<Integer, double[]> observation : observations) {
-                double[] val = observation.getValue();
-                double[] inputs = Arrays.copyOfRange(val, 1, val.length);
-                double groundTruth = val[0];
-
-                double prediction = knnMdl.predict(new DenseVector(inputs));
-
-                totalAmount++;
-                if (!Precision.equals(groundTruth, prediction, Precision.EPSILON))
-                    amountOfErrors++;
-
-                System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", prediction, groundTruth);
-            }
-
-            System.out.println(">>> ---------------------------------");
-
-            accuracy = 1 - amountOfErrors / (double) totalAmount;
-
-        }
-        return accuracy;
-    }
-
-    /**
-     * Fills cache with data and returns it.
-     *
-     * @param ignite Ignite instance.
-     * @return Filled Ignite Cache.
-     */
-    private static IgniteCache<Integer, double[]> getTestCache(Ignite ignite) {
-        CacheConfiguration<Integer, double[]> cacheConfiguration = new CacheConfiguration<>();
-        cacheConfiguration.setName("TEST_" + UUID.randomUUID());
-        cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 10));
-
-        IgniteCache<Integer, double[]> cache = ignite.createCache(cacheConfiguration);
-
-        for (int k = 0; k < 10; k++) { // Replicates the Iris dataset 10 times, slightly perturbed via mutate().
-            for (int i = 0; i < data.length; i++)
-                cache.put(k * 10000 + i, mutate(data[i], k));
-        }
-
-        return cache;
-    }
-
-    /**
-     * Slightly perturbs the data depending on the k parameter.
-     *
-     * @param datum The vector data.
-     * @param k     The passed parameter.
-     * @return The changed vector data.
-     */
-    private static double[] mutate(double[] datum, int k) {
-        for (int i = 0; i < datum.length; i++)
-            datum[i] += k / 100000.0; // Floating-point divisor: with integer division the perturbation would always be 0.
-        return datum;
-    }
-
-    /**
-     * The Iris dataset.
-     */
-    private static final double[][] data = {
-        {1, 5.1, 3.5, 1.4, 0.2},
-        {1, 4.9, 3, 1.4, 0.2},
-        {1, 4.7, 3.2, 1.3, 0.2},
-        {1, 4.6, 3.1, 1.5, 0.2},
-        {1, 5, 3.6, 1.4, 0.2},
-        {1, 5.4, 3.9, 1.7, 0.4},
-        {1, 4.6, 3.4, 1.4, 0.3},
-        {1, 5, 3.4, 1.5, 0.2},
-        {1, 4.4, 2.9, 1.4, 0.2},
-        {1, 4.9, 3.1, 1.5, 0.1},
-        {1, 5.4, 3.7, 1.5, 0.2},
-        {1, 4.8, 3.4, 1.6, 0.2},
-        {1, 4.8, 3, 1.4, 0.1},
-        {1, 4.3, 3, 1.1, 0.1},
-        {1, 5.8, 4, 1.2, 0.2},
-        {1, 5.7, 4.4, 1.5, 0.4},
-        {1, 5.4, 3.9, 1.3, 0.4},
-        {1, 5.1, 3.5, 1.4, 0.3},
-        {1, 5.7, 3.8, 1.7, 0.3},
-        {1, 5.1, 3.8, 1.5, 0.3},
-        {1, 5.4, 3.4, 1.7, 0.2},
-        {1, 5.1, 3.7, 1.5, 0.4},
-        {1, 4.6, 3.6, 1, 0.2},
-        {1, 5.1, 3.3, 1.7, 0.5},
-        {1, 4.8, 3.4, 1.9, 0.2},
-        {1, 5, 3, 1.6, 0.2},
-        {1, 5, 3.4, 1.6, 0.4},
-        {1, 5.2, 3.5, 1.5, 0.2},
-        {1, 5.2, 3.4, 1.4, 0.2},
-        {1, 4.7, 3.2, 1.6, 0.2},
-        {1, 4.8, 3.1, 1.6, 0.2},
-        {1, 5.4, 3.4, 1.5, 0.4},
-        {1, 5.2, 4.1, 1.5, 0.1},
-        {1, 5.5, 4.2, 1.4, 0.2},
-        {1, 4.9, 3.1, 1.5, 0.1},
-        {1, 5, 3.2, 1.2, 0.2},
-        {1, 5.5, 3.5, 1.3, 0.2},
-        {1, 4.9, 3.1, 1.5, 0.1},
-        {1, 4.4, 3, 1.3, 0.2},
-        {1, 5.1, 3.4, 1.5, 0.2},
-        {1, 5, 3.5, 1.3, 0.3},
-        {1, 4.5, 2.3, 1.3, 0.3},
-        {1, 4.4, 3.2, 1.3, 0.2},
-        {1, 5, 3.5, 1.6, 0.6},
-        {1, 5.1, 3.8, 1.9, 0.4},
-        {1, 4.8, 3, 1.4, 0.3},
-        {1, 5.1, 3.8, 1.6, 0.2},
-        {1, 4.6, 3.2, 1.4, 0.2},
-        {1, 5.3, 3.7, 1.5, 0.2},
-        {1, 5, 3.3, 1.4, 0.2},
-        {2, 7, 3.2, 4.7, 1.4},
-        {2, 6.4, 3.2, 4.5, 1.5},
-        {2, 6.9, 3.1, 4.9, 1.5},
-        {2, 5.5, 2.3, 4, 1.3},
-        {2, 6.5, 2.8, 4.6, 1.5},
-        {2, 5.7, 2.8, 4.5, 1.3},
-        {2, 6.3, 3.3, 4.7, 1.6},
-        {2, 4.9, 2.4, 3.3, 1},
-        {2, 6.6, 2.9, 4.6, 1.3},
-        {2, 5.2, 2.7, 3.9, 1.4},
-        {2, 5, 2, 3.5, 1},
-        {2, 5.9, 3, 4.2, 1.5},
-        {2, 6, 2.2, 4, 1},
-        {2, 6.1, 2.9, 4.7, 1.4},
-        {2, 5.6, 2.9, 3.6, 1.3},
-        {2, 6.7, 3.1, 4.4, 1.4},
-        {2, 5.6, 3, 4.5, 1.5},
-        {2, 5.8, 2.7, 4.1, 1},
-        {2, 6.2, 2.2, 4.5, 1.5},
-        {2, 5.6, 2.5, 3.9, 1.1},
-        {2, 5.9, 3.2, 4.8, 1.8},
-        {2, 6.1, 2.8, 4, 1.3},
-        {2, 6.3, 2.5, 4.9, 1.5},
-        {2, 6.1, 2.8, 4.7, 1.2},
-        {2, 6.4, 2.9, 4.3, 1.3},
-        {2, 6.6, 3, 4.4, 1.4},
-        {2, 6.8, 2.8, 4.8, 1.4},
-        {2, 6.7, 3, 5, 1.7},
-        {2, 6, 2.9, 4.5, 1.5},
-        {2, 5.7, 2.6, 3.5, 1},
-        {2, 5.5, 2.4, 3.8, 1.1},
-        {2, 5.5, 2.4, 3.7, 1},
-        {2, 5.8, 2.7, 3.9, 1.2},
-        {2, 6, 2.7, 5.1, 1.6},
-        {2, 5.4, 3, 4.5, 1.5},
-        {2, 6, 3.4, 4.5, 1.6},
-        {2, 6.7, 3.1, 4.7, 1.5},
-        {2, 6.3, 2.3, 4.4, 1.3},
-        {2, 5.6, 3, 4.1, 1.3},
-        {2, 5.5, 2.5, 4, 1.3},
-        {2, 5.5, 2.6, 4.4, 1.2},
-        {2, 6.1, 3, 4.6, 1.4},
-        {2, 5.8, 2.6, 4, 1.2},
-        {2, 5, 2.3, 3.3, 1},
-        {2, 5.6, 2.7, 4.2, 1.3},
-        {2, 5.7, 3, 4.2, 1.2},
-        {2, 5.7, 2.9, 4.2, 1.3},
-        {2, 6.2, 2.9, 4.3, 1.3},
-        {2, 5.1, 2.5, 3, 1.1},
-        {2, 5.7, 2.8, 4.1, 1.3},
-        {3, 6.3, 3.3, 6, 2.5},
-        {3, 5.8, 2.7, 5.1, 1.9},
-        {3, 7.1, 3, 5.9, 2.1},
-        {3, 6.3, 2.9, 5.6, 1.8},
-        {3, 6.5, 3, 5.8, 2.2},
-        {3, 7.6, 3, 6.6, 2.1},
-        {3, 4.9, 2.5, 4.5, 1.7},
-        {3, 7.3, 2.9, 6.3, 1.8},
-        {3, 6.7, 2.5, 5.8, 1.8},
-        {3, 7.2, 3.6, 6.1, 2.5},
-        {3, 6.5, 3.2, 5.1, 2},
-        {3, 6.4, 2.7, 5.3, 1.9},
-        {3, 6.8, 3, 5.5, 2.1},
-        {3, 5.7, 2.5, 5, 2},
-        {3, 5.8, 2.8, 5.1, 2.4},
-        {3, 6.4, 3.2, 5.3, 2.3},
-        {3, 6.5, 3, 5.5, 1.8},
-        {3, 7.7, 3.8, 6.7, 2.2},
-        {3, 7.7, 2.6, 6.9, 2.3},
-        {3, 6, 2.2, 5, 1.5},
-        {3, 6.9, 3.2, 5.7, 2.3},
-        {3, 5.6, 2.8, 4.9, 2},
-        {3, 7.7, 2.8, 6.7, 2},
-        {3, 6.3, 2.7, 4.9, 1.8},
-        {3, 6.7, 3.3, 5.7, 2.1},
-        {3, 7.2, 3.2, 6, 1.8},
-        {3, 6.2, 2.8, 4.8, 1.8},
-        {3, 6.1, 3, 4.9, 1.8},
-        {3, 6.4, 2.8, 5.6, 2.1},
-        {3, 7.2, 3, 5.8, 1.6},
-        {3, 7.4, 2.8, 6.1, 1.9},
-        {3, 7.9, 3.8, 6.4, 2},
-        {3, 6.4, 2.8, 5.6, 2.2},
-        {3, 6.3, 2.8, 5.1, 1.5},
-        {3, 6.1, 2.6, 5.6, 1.4},
-        {3, 7.7, 3, 6.1, 2.3},
-        {3, 6.3, 3.4, 5.6, 2.4},
-        {3, 6.4, 3.1, 5.5, 1.8},
-        {3, 6, 3, 4.8, 1.8},
-        {3, 6.9, 3.1, 5.4, 2.1},
-        {3, 6.7, 3.1, 5.6, 2.4},
-        {3, 6.9, 3.1, 5.1, 2.3},
-        {3, 5.8, 2.7, 5.1, 1.9},
-        {3, 6.8, 3.2, 5.9, 2.3},
-        {3, 6.7, 3.3, 5.7, 2.5},
-        {3, 6.7, 3, 5.2, 2.3},
-        {3, 6.3, 2.5, 5, 1.9},
-        {3, 6.5, 3, 5.2, 2},
-        {3, 6.2, 3.4, 5.4, 2.3},
-        {3, 5.9, 3, 5.1, 1.8}
-    };
-}
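
The export/import round trip that this and the following ExportImport examples demonstrate is the same three calls every time; only the model class changes. A minimal sketch, assuming mdl is the trained ANNClassificationModel from the example above:

    // Sketch of the JSON export/import round trip used throughout these examples.
    Path jsonMdlPath = Files.createTempFile(null, null);
    mdl.toJSON(jsonMdlPath);                                       // export the trained model
    ANNClassificationModel restored = ANNClassificationModel.fromJSON(jsonMdlPath);
    Files.deleteIfExists(jsonMdlPath);                             // clean up the temporary file
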
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/CompoundNaiveBayesExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/CompoundNaiveBayesExportImportExample.java
deleted file mode 100644
index 7d05f5e..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/CompoundNaiveBayesExportImportExample.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.naivebayes.compound.CompoundNaiveBayesModel;
-import org.apache.ignite.ml.naivebayes.compound.CompoundNaiveBayesTrainer;
-import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainer;
-import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainer;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-
-import static java.util.Arrays.asList;
-
-/**
- * Run a compound naive Bayes classification model based on the <a href="https://en.wikipedia.org/wiki/Naive_Bayes_classifier">
- * naive Bayes classifier</a> algorithm ({@link GaussianNaiveBayesTrainer}) and the <a
- * href="https://en.wikipedia.org/wiki/Naive_Bayes_classifier#Multinomial_naive_Bayes">discrete naive Bayes
- * classifier</a> algorithm ({@link DiscreteNaiveBayesTrainer}) over a distributed cache.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points.
- * <p>
- * After that it trains the naive Bayes classification model based on the specified data.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
- * compares prediction to expected outcome (ground truth), and builds
- * <a href="https://en.wikipedia.org/wiki/Confusion_matrix">confusion matrix</a>.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class CompoundNaiveBayesExportImportExample {
-    /** Run example. */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Compound Naive Bayes classification model over partitioned dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite)
-                    .fillCacheWith(MLSandboxDatasets.MIXED_DATASET);
-
-                double[] priorProbabilities = new double[]{.5, .5};
-                double[][] thresholds = new double[][]{{.5}, {.5}, {.5}, {.5}, {.5}};
-
-                System.out.println("\n>>> Create new naive Bayes classification trainer object.");
-                CompoundNaiveBayesTrainer trainer = new CompoundNaiveBayesTrainer()
-                    .withPriorProbabilities(priorProbabilities)
-                    .withGaussianNaiveBayesTrainer(new GaussianNaiveBayesTrainer())
-                    .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7))
-                    .withDiscreteNaiveBayesTrainer(new DiscreteNaiveBayesTrainer()
-                        .setBucketThresholds(thresholds))
-                    .withDiscreteFeatureIdsToSkip(asList(0, 1, 2));
-                System.out.println("\n>>> Perform the training to get the model.");
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                CompoundNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-
-                System.out.println("\n>>> Exported Compound Naive Bayes model: " + mdl.toString(true));
-
-                double accuracy = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for exported Compound Naive Bayes model:" + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                CompoundNaiveBayesModel modelImportedFromJSON = CompoundNaiveBayesModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Compound Naive Bayes model: " + modelImportedFromJSON.toString(true));
-
-                accuracy = Evaluator.evaluate(
-                    dataCache,
-                    modelImportedFromJSON,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for imported Compound Naive Bayes model:" + accuracy);
-
-                System.out.println("\n>>> Compound Naive Bayes model over partitioned dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
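
The part of this example that is specific to the compound model is how the Gaussian and discrete trainers are composed and which features each one handles. Stripped of the surrounding plumbing, the composition used above is (same placeholder priors, thresholds and feature splits):

    // Sketch: compose Gaussian and discrete naive Bayes trainers into one compound trainer.
    CompoundNaiveBayesTrainer trainer = new CompoundNaiveBayesTrainer()
        .withPriorProbabilities(new double[] {.5, .5})
        .withGaussianNaiveBayesTrainer(new GaussianNaiveBayesTrainer())
        .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7))        // left to the discrete trainer
        .withDiscreteNaiveBayesTrainer(new DiscreteNaiveBayesTrainer()
            .setBucketThresholds(new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}}))
        .withDiscreteFeatureIdsToSkip(asList(0, 1, 2));             // left to the Gaussian trainer
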
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java
deleted file mode 100644
index e7ad7ca..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Random;
-import org.apache.commons.math3.util.Precision;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
-
-/**
- * Example of using distributed {@link DecisionTreeClassificationTrainer}.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with pseudo-random training data points.</p>
- * <p>
- * After that it creates classification trainer and uses it to train the model on the training set.</p>
- * <p>
- * Finally, this example loops over the pseudo-randomly generated test set of data points, applies the trained model,
- * and compares prediction to expected outcome.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class DecisionTreeClassificationExportImportExample {
-    /**
-     * Executes example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println(">>> Decision tree classification trainer example started.");
-
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            // Create cache with training data.
-            CacheConfiguration<Integer, LabeledVector<Double>> trainingSetCfg = new CacheConfiguration<>();
-            trainingSetCfg.setName("TRAINING_SET");
-            trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
-
-            IgniteCache<Integer, LabeledVector<Double>> trainingSet = null;
-            Path jsonMdlPath = null;
-            try {
-                trainingSet = ignite.createCache(trainingSetCfg);
-
-                Random rnd = new Random(0);
-
-                // Fill training data.
-                for (int i = 0; i < 1000; i++)
-                    trainingSet.put(i, generatePoint(rnd));
-
-                // Create classification trainer.
-                DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
-
-                // Train decision tree model.
-                LabeledDummyVectorizer<Integer, Double> vectorizer = new LabeledDummyVectorizer<>();
-                DecisionTreeModel mdl = trainer.fit(
-                    ignite,
-                    trainingSet,
-                    vectorizer
-                );
-
-                System.out.println("\n>>> Exported Decision tree classification model: " + mdl);
-
-                int correctPredictions = evaluateModel(rnd, mdl);
-
-                System.out.println("\n>>> Accuracy for exported Decision tree classification model: " + correctPredictions / 10.0 + "%");
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                DecisionTreeModel modelImportedFromJSON = DecisionTreeModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Decision tree classification model: " + modelImportedFromJSON);
-
-                correctPredictions = evaluateModel(rnd, modelImportedFromJSON);
-
-                System.out.println("\n>>> Accuracy for imported Decision tree classification model: " + correctPredictions / 10.0 + "%");
-
-                System.out.println("\n>>> Decision tree classification trainer example completed.");
-            }
-            finally {
-                if (trainingSet != null)
-                    trainingSet.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static int evaluateModel(Random rnd, DecisionTreeModel mdl) {
-        // Calculate score.
-        int correctPredictions = 0;
-        for (int i = 0; i < 1000; i++) {
-            LabeledVector<Double> pnt = generatePoint(rnd);
-
-            double prediction = mdl.predict(pnt.features());
-            double lbl = pnt.label();
-
-            if (i % 50 == 1)
-                System.out.printf(">>> test #: %d\t\t predicted: %.4f\t\tlabel: %.4f\n", i, prediction, lbl);
-
-            if (Precision.equals(prediction, lbl, Precision.EPSILON))
-                correctPredictions++;
-        }
-        return correctPredictions;
-    }
-
-    /**
-     * Generate point with {@code x} in (-0.5, 0.5) and {@code y} in the same interval. If {@code x * y > 0} then label
-     * is 1, otherwise 0.
-     *
-     * @param rnd Random.
-     * @return Point with label.
-     */
-    private static LabeledVector<Double> generatePoint(Random rnd) {
-
-        double x = rnd.nextDouble() - 0.5;
-        double y = rnd.nextDouble() - 0.5;
-
-        return new LabeledVector<>(VectorUtils.of(x, y), x * y > 0 ? 1. : 0.);
-    }
-}
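
Once the training cache is filled, fitting the classifier and scoring a point comes down to a few lines. A minimal sketch reusing only the calls from the example above, with ignite and trainingSet as defined there:

    // Sketch: train the classifier and predict one point.
    DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
    DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
    double prediction = mdl.predict(VectorUtils.of(0.25, 0.25));   // x * y > 0, so the expected label is 1.0
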
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java
deleted file mode 100644
index 9857ba9..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
-import org.apache.ignite.ml.tree.DecisionTreeRegressionTrainer;
-
-/**
- * Example of using distributed {@link DecisionTreeRegressionTrainer}.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with generated test data points ({@code sin(x)} on
- * interval {@code [0, 10)}).</p>
- * <p>
- * After that it creates classification trainer and uses it to train the model on the training set.</p>
- * <p>
- * Finally, this example loops over the test data points, applies the trained model, and compares prediction to expected
- * outcome (ground truth).</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class DecisionTreeRegressionExportImportExample {
-    /**
-     * Executes example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String... args) throws IOException {
-        System.out.println(">>> Decision tree regression trainer example started.");
-
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            // Create cache with training data.
-            CacheConfiguration<Integer, LabeledVector<Double>> trainingSetCfg = new CacheConfiguration<>();
-            trainingSetCfg.setName("TRAINING_SET");
-            trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
-
-            IgniteCache<Integer, LabeledVector<Double>> trainingSet = null;
-            Path jsonMdlPath = null;
-            try {
-                trainingSet = ignite.createCache(trainingSetCfg);
-
-                // Fill training data.
-                generatePoints(trainingSet);
-
-                // Create regression trainer.
-                DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(10, 0);
-
-                // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
-
-                System.out.println("\n>>> Exported Decision tree regression model: " + mdl);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                DecisionTreeModel modelImportedFromJSON = DecisionTreeModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Decision tree regression model: " + modelImportedFromJSON);
-
-                System.out.println(">>> ---------------------------------");
-                System.out.println(">>> | Prediction\t| Ground Truth\t|");
-                System.out.println(">>> ---------------------------------");
-
-                // Calculate score.
-                for (int x = 0; x < 10; x++) {
-                    double predicted = mdl.predict(VectorUtils.of(x));
-
-                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.sin(x));
-                }
-
-                System.out.println(">>> ---------------------------------");
-
-                System.out.println("\n>>> Decision tree regression trainer example completed.");
-            }
-            finally {
-                if (trainingSet != null)
-                    trainingSet.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    /**
-     * Generates {@code sin(x)} on interval {@code [0, 10)} and loads it into the specified cache.
-     */
-    private static void generatePoints(IgniteCache<Integer, LabeledVector<Double>> trainingSet) {
-        for (int i = 0; i < 1000; i++) {
-            double x = i / 100.0;
-            double y = Math.sin(x);
-
-            trainingSet.put(i, new LabeledVector<>(VectorUtils.of(x), y));
-        }
-    }
-}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java
deleted file mode 100644
index c4d44c4..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesModel;
-import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainer;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-
-/**
- * Run naive Bayes classification model based on <a href="https://en.wikipedia.org/wiki/Naive_Bayes_classifier#Multinomial_naive_Bayes">
- * naive Bayes classifier</a> algorithm ({@link DiscreteNaiveBayesTrainer}) over distributed cache.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points.
- * </p>
- * <p>
- * After that it trains the Discrete naive Bayes classification model based on the specified data.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
- * compares prediction to expected outcome (ground truth), and builds
- * <a href="https://en.wikipedia.org/wiki/Confusion_matrix">confusion matrix</a>.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class DiscreteNaiveBayesExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println(">>> Discrete naive Bayes classification model over partitioned dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.ENGLISH_VS_SCOTTISH);
-
-                double[][] thresholds = new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}};
-                System.out.println(">>> Create new Discrete naive Bayes classification trainer object.");
-                DiscreteNaiveBayesTrainer trainer = new DiscreteNaiveBayesTrainer()
-                    .setBucketThresholds(thresholds);
-
-                System.out.println("\n>>> Perform the training to get the model.");
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                DiscreteNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-                System.out.println("\n>>> Exported Discrete Naive Bayes model: " + mdl.toString(true));
-
-                double accuracy = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for exported Discrete Naive Bayes model:" + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                DiscreteNaiveBayesModel modelImportedFromJSON = DiscreteNaiveBayesModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Discrete Naive Bayes model: " + modelImportedFromJSON.toString(true));
-
-                accuracy = Evaluator.evaluate(
-                    dataCache,
-                    modelImportedFromJSON,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for imported Discrete Naive Bayes model:" + accuracy);
-
-                System.out.println("\n>>> Discrete Naive Bayes model over partitioned dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-}
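
Instead of a hand-rolled scoring loop, this example measures accuracy through the Evaluator facade, and the call is identical for the exported and the imported model. A minimal sketch, with dataCache, mdl and vectorizer as defined above:

    // Sketch: compute accuracy over the cached dataset with the Evaluator facade.
    double accuracy = Evaluator.evaluate(
        dataCache,            // cache with labeled vectors
        mdl,                  // exported or imported DiscreteNaiveBayesModel
        vectorizer,           // the same vectorizer used for training
        MetricName.ACCURACY
    );
    System.out.println(">>> Accuracy: " + accuracy);
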
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java
deleted file mode 100644
index 9aa8f22..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.boosting.GDBModel;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
-import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
-import org.apache.ignite.ml.math.functions.IgniteFunction;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
-import org.jetbrains.annotations.NotNull;
-
-/**
- * This example demonstrates classification learning based on the Gradient Boosting on Trees implementation. It shows
- * initialization of {@link GDBBinaryClassifierOnTreesTrainer}, initialization of an Ignite cache, the training step,
- * and a comparison of predicted and actual values.
- * <p>
- * In this example the dataset is generated automatically from the meander function {@code f(x) = [sin(x) > 0]}.</p>
- */
-public class GDBOnTreesClassificationExportImportExample {
-    /**
-     * Run example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> GDB classification trainer example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            // Create cache with training data.
-            CacheConfiguration<Integer, double[]> trainingSetCfg = createCacheConfiguration();
-            IgniteCache<Integer, double[]> trainingSet = null;
-            Path jsonMdlPath = null;
-            try {
-                trainingSet = fillTrainingData(ignite, trainingSetCfg);
-
-                // Create classification trainer.
-                GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
-                    .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1));
-
-                // Train decision tree model.
-                GDBModel mdl = trainer.fit(
-                    ignite,
-                    trainingSet,
-                    new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST)
-                );
-
-                System.out.println("\n>>> Exported GDB classification model: " + mdl.toString(true));
-
-                predictOnGeneratedData(mdl);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                IgniteFunction<Double, Double> lbMapper = lb -> lb > 0.5 ? 1.0 : 0.0;
-                GDBModel modelImportedFromJSON = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);
-
-                System.out.println("\n>>> Imported GDB classification model: " + modelImportedFromJSON.toString(true));
-
-                predictOnGeneratedData(modelImportedFromJSON);
-
-                System.out.println(">>> GDB classification trainer example completed.");
-            }
-            finally {
-                if (trainingSet != null)
-                    trainingSet.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static void predictOnGeneratedData(GDBModel mdl) {
-        System.out.println(">>> ---------------------------------");
-        System.out.println(">>> | Prediction\t| Valid answer\t|");
-        System.out.println(">>> ---------------------------------");
-
-        // Calculate score.
-        for (int x = -5; x < 5; x++) {
-            double predicted = mdl.predict(VectorUtils.of(x));
-
-            System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.sin(x) < 0 ? 0.0 : 1.0);
-        }
-
-        System.out.println(">>> ---------------------------------");
-        System.out.println(">>> Count of trees = " + mdl.getModels().size());
-        System.out.println(">>> ---------------------------------");
-    }
-
-    /**
-     * Create cache configuration.
-     */
-    @NotNull private static CacheConfiguration<Integer, double[]> createCacheConfiguration() {
-        CacheConfiguration<Integer, double[]> trainingSetCfg = new CacheConfiguration<>();
-        trainingSetCfg.setName("TRAINING_SET");
-        trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
-        return trainingSetCfg;
-    }
-
-    /**
-     * Fill meander-like training data.
-     *
-     * @param ignite         Ignite instance.
-     * @param trainingSetCfg Training set config.
-     */
-    @NotNull private static IgniteCache<Integer, double[]> fillTrainingData(Ignite ignite,
-        CacheConfiguration<Integer, double[]> trainingSetCfg) {
-        IgniteCache<Integer, double[]> trainingSet = ignite.getOrCreateCache(trainingSetCfg);
-        for (int i = -50; i <= 50; i++) {
-            double x = ((double)i) / 10.0;
-            double y = Math.sin(x) < 0 ? 0.0 : 1.0;
-            trainingSet.put(i, new double[] {x, y});
-        }
-        return trainingSet;
-    }
-}
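
On import, the example re-attaches a label mapping to the restored GDBModel; for the classification case the mapping thresholds the raw prediction at 0.5. A minimal sketch reusing only the calls shown above, with jsonMdlPath as defined there:

    // Sketch: restore a GDB classification model and re-attach the label mapping.
    IgniteFunction<Double, Double> lbMapper = lb -> lb > 0.5 ? 1.0 : 0.0;  // threshold the raw score
    GDBModel restored = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);
    double label = restored.predict(VectorUtils.of(1.5));                  // sin(1.5) > 0, so the meander label is 1.0
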
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java
deleted file mode 100644
index 14233e3..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.boosting.GDBModel;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
-import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
-import org.apache.ignite.ml.math.functions.IgniteFunction;
-import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.tree.boosting.GDBRegressionOnTreesTrainer;
-import org.jetbrains.annotations.NotNull;
-
-/**
- * This example demonstrates regression learning based on the Gradient Boosting on Trees implementation. It shows
- * initialization of {@link GDBRegressionOnTreesTrainer}, initialization of an Ignite cache, the training step, and a
- * comparison of predicted and actual values.
- * <p>
- * In this example the dataset is generated automatically from the parabolic function {@code f(x) = x^2}.</p>
- */
-public class GDBOnTreesRegressionExportImportExample {
-    /**
-     * Run example.
-     *
-     * @param args Command line arguments, none required.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> GDB regression trainer example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            // Create cache with training data.
-            CacheConfiguration<Integer, double[]> trainingSetCfg = createCacheConfiguration();
-            IgniteCache<Integer, double[]> trainingSet = null;
-            Path jsonMdlPath = null;
-            try {
-                trainingSet = fillTrainingData(ignite, trainingSetCfg);
-
-                // Create regression trainer.
-                GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
-                    .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.001));
-
-                // Train decision tree model.
-                GDBModel mdl = trainer.fit(
-                    ignite,
-                    trainingSet,
-                    new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST)
-                );
-
-                System.out.println("\n>>> Exported GDB regression model: " + mdl.toString(true));
-
-                predictOnGeneratedData(mdl);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                IgniteFunction<Double, Double> lbMapper = lb -> lb;
-                GDBModel modelImportedFromJSON = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);
-
-                System.out.println("\n>>> Imported GDB regression model: " + modelImportedFromJSON.toString(true));
-
-                predictOnGeneratedData(modelImportedFromJSON);
-
-                System.out.println(">>> GDB regression trainer example completed.");
-            }
-            finally {
-                if (trainingSet != null)
-                    trainingSet.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static void predictOnGeneratedData(GDBModel mdl) {
-        System.out.println(">>> ---------------------------------");
-        System.out.println(">>> | Prediction\t| Valid answer \t|");
-        System.out.println(">>> ---------------------------------");
-
-        // Calculate score.
-        for (int x = -5; x < 5; x++) {
-            double predicted = mdl.predict(VectorUtils.of(x));
-
-            System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.pow(x, 2));
-        }
-
-        System.out.println(">>> ---------------------------------");
-    }
-
-    /**
-     * Create cache configuration.
-     */
-    @NotNull private static CacheConfiguration<Integer, double[]> createCacheConfiguration() {
-        CacheConfiguration<Integer, double[]> trainingSetCfg = new CacheConfiguration<>();
-        trainingSetCfg.setName("TRAINING_SET");
-        trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
-        return trainingSetCfg;
-    }
-
-    /**
-     * Fill parabolic training data.
-     *
-     * @param ignite         Ignite instance.
-     * @param trainingSetCfg Training set config.
-     */
-    @NotNull private static IgniteCache<Integer, double[]> fillTrainingData(Ignite ignite,
-        CacheConfiguration<Integer, double[]> trainingSetCfg) {
-        IgniteCache<Integer, double[]> trainingSet = ignite.getOrCreateCache(trainingSetCfg);
-        for (int i = -50; i <= 50; i++) {
-            double x = ((double)i) / 10.0;
-            double y = Math.pow(x, 2);
-            trainingSet.put(i, new double[] {x, y});
-        }
-        return trainingSet;
-    }
-}
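
For reference, the example removed above trains on a synthetic parabola: 101 rows with x from -5.0 to 5.0 in steps of
0.1 and label y = x^2, stored as double[] {x, y} with the label in the last position (matching the DoubleArrayVectorizer
with LabelCoordinate.LAST used above). A minimal standalone sketch of that data generation, in plain Java with no Ignite
dependencies (the class name and printed sample rows are illustrative only):

import java.util.ArrayList;
import java.util.List;

public class ParabolaDataSketch {
    public static void main(String[] args) {
        // Same synthetic training set as fillTrainingData() above: y = x^2 for x in [-5.0, 5.0], step 0.1.
        List<double[]> trainingSet = new ArrayList<>();
        for (int i = -50; i <= 50; i++) {
            double x = i / 10.0;
            double y = Math.pow(x, 2);
            trainingSet.add(new double[] {x, y}); // Feature first, label last.
        }

        // Print every 25th row to show the feature/label layout.
        for (int i = 0; i < trainingSet.size(); i += 25)
            System.out.printf("x = %.1f, label = %.2f%n", trainingSet.get(i)[0], trainingSet.get(i)[1]);
    }
}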
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java
deleted file mode 100644
index b6fb9c9..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesModel;
-import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainer;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-
-/**
- * Run naive Bayes classification model based on <a href="https://en.wikipedia.org/wiki/Naive_Bayes_classifier"> naive
- * Bayes classifier</a> algorithm ({@link GaussianNaiveBayesTrainer}) over distributed cache.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://en.wikipedia.org/wiki/Iris_flower_data_set">Iris dataset</a>).</p>
- * <p>
- * After that it trains the naive Bayes classification model based on the specified data.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
- * compares prediction to expected outcome (ground truth), and builds
- * <a href="https://en.wikipedia.org/wiki/Confusion_matrix">confusion matrix</a>.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class GaussianNaiveBayesExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Naive Bayes classification model over partitioned dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
-
-                System.out.println(">>> Create new Gaussian Naive Bayes classification trainer object.");
-                GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer();
-
-                System.out.println("\n>>> Perform the training to get the model.");
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                GaussianNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-                System.out.println("\n>>> Exported Gaussian Naive Bayes model: " + mdl.toString(true));
-
-                double accuracy = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for exported Gaussian Naive Bayes model: " + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                GaussianNaiveBayesModel modelImportedFromJSON = GaussianNaiveBayesModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Gaussian Naive Bayes model: " + modelImportedFromJSON.toString(true));
-
-                accuracy = Evaluator.evaluate(
-                    dataCache,
-                    modelImportedFromJSON,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for imported Gaussian Naive Bayes model: " + accuracy);
-
-                System.out.println("\n>>> Gaussian Naive Bayes model over partitioned dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-}
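
All of the export/import examples removed in this commit share the same round trip: serialize the trained model to a
temporary JSON file with toJSON(Path), read it back with the model class's fromJSON(Path), re-evaluate, and delete the
file. Below is a minimal standalone sketch of just the temp-file handling, using only java.nio; the placeholder payload
stands in for the model JSON, since producing real model JSON would require a trained Ignite model:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class JsonRoundTripSketch {
    public static void main(String[] args) throws IOException {
        Path jsonMdlPath = null;
        try {
            // In the deleted examples this step is mdl.toJSON(jsonMdlPath); here a placeholder payload stands in.
            jsonMdlPath = Files.createTempFile(null, null);
            Files.write(jsonMdlPath, "{\"model\":\"placeholder\"}".getBytes());

            // In the deleted examples this step is SomeModel.fromJSON(jsonMdlPath).
            String restored = new String(Files.readAllBytes(jsonMdlPath));
            System.out.println(">>> Restored payload: " + restored);
        }
        finally {
            // Mirror the examples' cleanup: always remove the temporary file.
            if (jsonMdlPath != null)
                Files.deleteIfExists(jsonMdlPath);
        }
    }
}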
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java
deleted file mode 100644
index ec5e689..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.clustering.kmeans.KMeansModel;
-import org.apache.ignite.ml.clustering.kmeans.KMeansTrainer;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.distances.WeightedMinkowskiDistance;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-
-/**
- * Run KMeans clustering algorithm ({@link KMeansTrainer}) over distributed dataset.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://en.wikipedia.org/wiki/Iris_flower_data_set">Iris dataset</a>).</p>
- * <p>
- * After that it trains the model based on the specified data using
- * <a href="https://en.wikipedia.org/wiki/K-means_clustering">KMeans</a> algorithm.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict which cluster each
- * point belongs to, and compares prediction to expected outcome (ground truth).</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class KMeansClusterizationExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> KMeans clustering algorithm over cached dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                KMeansTrainer trainer = new KMeansTrainer()
-                    .withDistance(new WeightedMinkowskiDistance(2, new double[] {5.9360, 2.7700, 4.2600, 1.3260}));
-                   //.withDistance(new MinkowskiDistance(2));
-
-                KMeansModel mdl = trainer.fit(
-                    ignite,
-                    dataCache,
-                    vectorizer
-                );
-
-                System.out.println("\n>>> Exported KMeans model: " + mdl);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                KMeansModel modelImportedFromJSON = KMeansModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported KMeans model: " + modelImportedFromJSON);
-
-                System.out.println("\n>>> KMeans clustering algorithm over cached dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
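
The KMeans example above configures a WeightedMinkowskiDistance of order 2 with per-feature weights. As a rough
illustration of what such a metric computes, here is the textbook weighted Minkowski formulation
d(a, b) = (sum_i (w_i * |a_i - b_i|)^p)^(1/p) in plain Java; the exact weighting convention inside Ignite's
WeightedMinkowskiDistance may differ, and the center/observation values below are made up:

public class WeightedMinkowskiSketch {
    /** Textbook weighted Minkowski distance: (sum_i (w_i * |a_i - b_i|)^p)^(1/p). */
    static double distance(double[] a, double[] b, double[] w, int p) {
        double sum = 0.0;
        for (int i = 0; i < a.length; i++)
            sum += Math.pow(w[i] * Math.abs(a[i] - b[i]), p);
        return Math.pow(sum, 1.0 / p);
    }

    public static void main(String[] args) {
        double[] w = {5.9360, 2.7700, 4.2600, 1.3260}; // Weights from the deleted example.
        double[] center = {5.0, 3.4, 1.5, 0.2};        // Hypothetical cluster center.
        double[] point = {6.7, 3.1, 4.7, 1.5};         // Hypothetical observation.
        System.out.printf("Weighted Minkowski (p=2) distance: %.4f%n", distance(point, center, w, 2));
    }
}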
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java
deleted file mode 100644
index 723784b..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.regressions.linear.LinearRegressionLSQRTrainer;
-import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-
-/**
- * Run linear regression model based on <a href="http://web.stanford.edu/group/SOL/software/lsqr/">LSQR algorithm</a>
- * ({@link LinearRegressionLSQRTrainer}) over cached dataset.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with simple test data.</p>
- * <p>
- * After that it trains the linear regression model based on the specified data.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the target value
- * and compares prediction to expected outcome (ground truth).</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class LinearRegressionExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Linear regression model over cache based dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println(">>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.MORTALITY_DATA);
-
-                System.out.println("\n>>> Create new linear regression trainer object.");
-                LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();
-
-                System.out.println("\n>>> Perform the training to get the model.");
-
-                LinearRegressionModel mdl = trainer.fit(
-                        ignite,
-                        dataCache,
-                        new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST)
-                    );
-
-                System.out.println("\n>>> Exported LinearRegression model: " + mdl);
-
-                double rmse = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST),
-                    MetricName.RMSE
-                );
-
-                System.out.println("\n>>> RMSE for exported LinearRegression model: " + rmse);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                LinearRegressionModel modelImportedFromJSON = LinearRegressionModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported LinearRegression model: " + modelImportedFromJSON);
-
-                rmse = Evaluator.evaluate(
-                    dataCache,
-                    modelImportedFromJSON,
-                    new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST),
-                    MetricName.RMSE
-                );
-
-                System.out.println("\n>>> RMSE for imported LinearRegression model: " + rmse);
-
-                System.out.println("\n>>> Linear regression model over cache based dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
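
The linear regression example above scores both the exported and the imported model with MetricName.RMSE. For readers
unfamiliar with the metric, a standalone sketch of the computation follows (square root of the mean squared residual);
the prediction and label values are hypothetical and only exercise the arithmetic:

public class RmseSketch {
    /** Root mean squared error: sqrt(mean((prediction - truth)^2)). */
    static double rmse(double[] predictions, double[] groundTruth) {
        double sumSq = 0.0;
        for (int i = 0; i < predictions.length; i++) {
            double diff = predictions[i] - groundTruth[i];
            sumSq += diff * diff;
        }
        return Math.sqrt(sumSq / predictions.length);
    }

    public static void main(String[] args) {
        double[] predictions = {21.5, 34.0, 18.2}; // Hypothetical model outputs.
        double[] groundTruth = {22.0, 33.0, 19.0}; // Hypothetical labels.
        System.out.printf("RMSE: %.4f%n", rmse(predictions, groundTruth));
    }
}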
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java
deleted file mode 100644
index 6491f7e..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.nn.UpdatesStrategy;
-import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate;
-import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator;
-import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel;
-import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-
-/**
- * Run logistic regression model based on <a href="https://en.wikipedia.org/wiki/Stochastic_gradient_descent">
- * stochastic gradient descent</a> algorithm ({@link LogisticRegressionSGDTrainer}) over distributed cache.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://en.wikipedia.org/wiki/Iris_flower_data_set">Iris dataset</a>).</p>
- * <p>
- * After that it trains the logistic regression model based on the specified data.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
- * compares prediction to expected outcome (ground truth), and builds
- * <a href="https://en.wikipedia.org/wiki/Confusion_matrix">confusion matrix</a>.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class LogisticRegressionExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Logistic regression model over partitioned dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
-
-                System.out.println("\n>>> Create new logistic regression trainer object.");
-                LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer()
-                    .withUpdatesStgy(new UpdatesStrategy<>(
-                        new SimpleGDUpdateCalculator(0.2),
-                        SimpleGDParameterUpdate.SUM_LOCAL,
-                        SimpleGDParameterUpdate.AVG
-                    ))
-                    .withMaxIterations(100000)
-                    .withLocIterations(100)
-                    .withBatchSize(10)
-                    .withSeed(123L);
-
-                System.out.println("\n>>> Perform the training to get the model.");
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                LogisticRegressionModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-
-                System.out.println("\n>>> Exported logistic regression model: " + mdl);
-
-                double accuracy = Evaluator.evaluate(dataCache,
-                    mdl, vectorizer, MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for exported logistic regression model " + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                LogisticRegressionModel modelImportedFromJSON = LogisticRegressionModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported logistic regression model: " + modelImportedFromJSON);
-
-                accuracy = Evaluator.evaluate(dataCache,
-                    modelImportedFromJSON, vectorizer, MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for imported logistic regression model " + accuracy);
-
-                System.out.println("\n>>> Logistic regression model over partitioned dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
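
The trainer above is configured with SimpleGDUpdateCalculator(0.2), i.e. plain gradient descent with learning rate 0.2,
batch size 10 and up to 100000 iterations. As an illustration of what a single update looks like, here is the textbook
SGD step for logistic regression on one sample; this is a simplification for intuition, not Ignite's internal update
path, and the weights, features and label are made up:

public class LogRegSgdStepSketch {
    static double sigmoid(double z) {
        return 1.0 / (1.0 + Math.exp(-z));
    }

    public static void main(String[] args) {
        double learningRate = 0.2; // Matches SimpleGDUpdateCalculator(0.2) above.
        double[] w = {0.0, 0.0};   // Weights (intercept omitted for brevity).
        double[] x = {5.1, 3.5};   // Hypothetical feature vector.
        double y = 1.0;            // Hypothetical label in {0, 1}.

        // One gradient step on the log-loss: w <- w - lr * (sigmoid(w.x) - y) * x.
        double dot = w[0] * x[0] + w[1] * x[1];
        double err = sigmoid(dot) - y;
        for (int i = 0; i < w.length; i++)
            w[i] -= learningRate * err * x[i];

        System.out.printf("Updated weights: [%.4f, %.4f]%n", w[0], w[1]);
    }
}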
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java
deleted file mode 100644
index 6bb368f..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import javax.cache.Cache;
-import org.apache.commons.math3.util.Precision;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.query.QueryCursor;
-import org.apache.ignite.cache.query.ScanQuery;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.FeatureMeta;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.tree.randomforest.RandomForestClassifierTrainer;
-import org.apache.ignite.ml.tree.randomforest.RandomForestModel;
-import org.apache.ignite.ml.tree.randomforest.data.FeaturesCountSelectionStrategies;
-
-/**
- * Example represents a solution for the task of wine classification based on a
- * <a href ="https://en.wikipedia.org/wiki/Random_forest">Random Forest</a> implementation for
- * multi-classification.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://archive.ics.uci.edu/ml/machine-learning-databases/wine/">Wine recognition dataset</a>).</p>
- * <p>
- * After that it initializes the {@link RandomForestClassifierTrainer} with a thread pool for multi-threaded learning and
- * trains the model based on the specified data using the random forest classification algorithm.</p>
- * <p>
- * Finally, this example loops over the test set of data points, compares prediction of the trained model to the
- * expected outcome (ground truth), and evaluates accuracy of the model.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class RandomForestClassificationExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Random Forest multi-class classification algorithm over cached dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.WINE_RECOGNITION);
-
-                AtomicInteger idx = new AtomicInteger(0);
-                RandomForestClassifierTrainer classifier = new RandomForestClassifierTrainer(
-                    IntStream.range(0, dataCache.get(1).size() - 1).mapToObj(
-                        x -> new FeatureMeta("", idx.getAndIncrement(), false)).collect(Collectors.toList())
-                ).withAmountOfTrees(101)
-                    .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
-                    .withMaxDepth(4)
-                    .withMinImpurityDelta(0.)
-                    .withSubSampleSize(0.3)
-                    .withSeed(0);
-
-                System.out.println(">>> Configured trainer: " + classifier.getClass().getSimpleName());
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-                RandomForestModel mdl = classifier.fit(ignite, dataCache, vectorizer);
-
-                System.out.println(">>> Exported Random Forest classification model: " + mdl.toString(true));
-
-                double accuracy = evaluateModel(dataCache, mdl);
-
-                System.out.println("\n>>> Accuracy for exported Random Forest classification model " + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                RandomForestModel modelImportedFromJSON = RandomForestModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Random Forest classification model: " + modelImportedFromJSON);
-
-                accuracy = evaluateModel(dataCache, modelImportedFromJSON);
-
-                System.out.println("\n>>> Accuracy for imported Random Forest classification model " + accuracy);
-
-                System.out.println("\n>>> Random Forest multi-class classification algorithm over cached dataset usage example completed.");
-
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static double evaluateModel(IgniteCache<Integer, Vector> dataCache, RandomForestModel randomForestMdl) {
-        int amountOfErrors = 0;
-        int totalAmount = 0;
-
-        try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
-            for (Cache.Entry<Integer, Vector> observation : observations) {
-                Vector val = observation.getValue();
-                Vector inputs = val.copyOfRange(1, val.size());
-                double groundTruth = val.get(0);
-
-                double prediction = randomForestMdl.predict(inputs);
-
-                totalAmount++;
-                if (!Precision.equals(groundTruth, prediction, Precision.EPSILON))
-                    amountOfErrors++;
-            }
-        }
-
-        return 1 - amountOfErrors / (double) totalAmount;
-    }
-}
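
The evaluateModel() helper above computes accuracy by counting misclassified rows and returning 1 - errors / total,
using commons-math Precision.equals for the comparison. Below is a dependency-free sketch of the same idea, with
Math.abs and an explicit epsilon standing in for Precision.equals and with hypothetical prediction/label arrays:

public class AccuracySketch {
    /** Accuracy as in evaluateModel() above: 1 - misclassified / total, with a small tolerance. */
    static double accuracy(double[] predictions, double[] groundTruth, double eps) {
        int errors = 0;
        for (int i = 0; i < predictions.length; i++)
            if (Math.abs(predictions[i] - groundTruth[i]) > eps)
                errors++;
        return 1 - errors / (double) predictions.length;
    }

    public static void main(String[] args) {
        double[] predictions = {0, 1, 2, 1, 0, 2}; // Hypothetical class predictions.
        double[] groundTruth = {0, 1, 2, 2, 0, 1}; // Hypothetical labels.
        System.out.printf("Accuracy: %.4f%n", accuracy(predictions, groundTruth, 1e-9));
    }
}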
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java
deleted file mode 100644
index 4d7d4ad..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import javax.cache.Cache;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.query.QueryCursor;
-import org.apache.ignite.cache.query.ScanQuery;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.FeatureMeta;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
-import org.apache.ignite.ml.environment.logging.ConsoleLogger;
-import org.apache.ignite.ml.environment.parallelism.ParallelismStrategy;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.tree.randomforest.RandomForestModel;
-import org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer;
-import org.apache.ignite.ml.tree.randomforest.data.FeaturesCountSelectionStrategies;
-
-/**
- * Example represents a solution for the task of price predictions for houses in Boston based on a
- * <a href ="https://en.wikipedia.org/wiki/Random_forest">Random Forest</a> implementation for regression.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://archive.ics.uci.edu/ml/machine-learning-databases/housing/">Boston Housing dataset</a>).</p>
- * <p>
- * After that it initializes the {@link RandomForestRegressionTrainer} and trains the model based on the specified data
- * using random forest regression algorithm.</p>
- * <p>
- * Finally, this example loops over the test set of data points, compares prediction of the trained model to the
- * expected outcome (ground truth), and evaluates model quality in terms of Mean Absolute Error (MAE).</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class RandomForestRegressionExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> Random Forest regression algorithm over cached dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.BOSTON_HOUSE_PRICES);
-
-                AtomicInteger idx = new AtomicInteger(0);
-                RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(
-                    IntStream.range(0, dataCache.get(1).size() - 1).mapToObj(
-                        x -> new FeatureMeta("", idx.getAndIncrement(), false)).collect(Collectors.toList())
-                ).withAmountOfTrees(101)
-                    .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
-                    .withMaxDepth(4)
-                    .withMinImpurityDelta(0.)
-                    .withSubSampleSize(0.3)
-                    .withSeed(0);
-
-                trainer.withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
-                    .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
-                    .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW)
-                );
-
-                System.out.println("\n>>> Configured trainer: " + trainer.getClass().getSimpleName());
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-                RandomForestModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-
-                System.out.println("\n>>> Exported Random Forest regression model: " + mdl.toString(true));
-
-                double mae = evaluateModel(dataCache, mdl);
-
-                System.out.println("\n>>> Mean absolute error (MAE) for exported Random Forest regression model " + mae);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                RandomForestModel modelImportedFromJSON = RandomForestModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported Random Forest regression model: " + modelImportedFromJSON.toString(true));
-
-                mae = evaluateModel(dataCache, modelImportedFromJSON);
-
-                System.out.println("\n>>> Mean absolute error (MAE) for imported Random Forest regression model " + mae);
-
-                System.out.println("\n>>> Random Forest regression algorithm over cached dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-
-    private static double evaluateModel(IgniteCache<Integer, Vector> dataCache, RandomForestModel randomForestMdl) {
-        double mae = 0.0;
-        int totalAmount = 0;
-
-        try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
-            for (Cache.Entry<Integer, Vector> observation : observations) {
-                Vector val = observation.getValue();
-                Vector inputs = val.copyOfRange(1, val.size());
-                double groundTruth = val.get(0);
-
-                double prediction = randomForestMdl.predict(inputs);
-
-                mae += Math.abs(prediction - groundTruth);
-
-                totalAmount++;
-            }
-
-            mae /= totalAmount;
-        }
-        return mae;
-    }
-}
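
Here evaluateModel() reports the mean absolute error: the average of |prediction - ground truth| over all observations.
The same computation as a standalone sketch, with hypothetical predictions and labels in place of the Boston housing
data:

public class MaeSketch {
    /** Mean absolute error as in evaluateModel() above: mean(|prediction - truth|). */
    static double mae(double[] predictions, double[] groundTruth) {
        double sum = 0.0;
        for (int i = 0; i < predictions.length; i++)
            sum += Math.abs(predictions[i] - groundTruth[i]);
        return sum / predictions.length;
    }

    public static void main(String[] args) {
        double[] predictions = {21.5, 34.0, 18.2, 27.9}; // Hypothetical price predictions.
        double[] groundTruth = {22.0, 33.0, 19.0, 30.1}; // Hypothetical labels, same units as the target.
        System.out.printf("MAE: %.4f%n", mae(predictions, groundTruth));
    }
}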
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java
deleted file mode 100644
index 2426290..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.inference.exchange;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.MetricName;
-import org.apache.ignite.ml.svm.SVMLinearClassificationModel;
-import org.apache.ignite.ml.svm.SVMLinearClassificationTrainer;
-
-/**
- * Run SVM binary-class classification model ({@link SVMLinearClassificationModel}) over distributed dataset.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data points (based on the
- * <a href="https://en.wikipedia.org/wiki/Iris_flower_data_set">Iris dataset</a>).</p>
- * <p>
- * After that it trains the model based on the specified data using the linear SVM classification algorithm.</p>
- * <p>
- * Finally, this example loops over the test set of data points, applies the trained model to predict the class label,
- * compares prediction to expected outcome (ground truth), and builds
- * <a href="https://en.wikipedia.org/wiki/Confusion_matrix">confusion matrix</a>.</p>
- * <p>
- * You can change the test data used in this example and re-run it to explore this algorithm further.</p>
- */
-public class SVMExportImportExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) throws IOException {
-        System.out.println();
-        System.out.println(">>> SVM Binary classification model over cached dataset usage example started.");
-        // Start ignite grid.
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            System.out.println("\n>>> Ignite grid started.");
-
-            IgniteCache<Integer, Vector> dataCache = null;
-            Path jsonMdlPath = null;
-            try {
-                dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
-
-                SVMLinearClassificationTrainer trainer = new SVMLinearClassificationTrainer();
-
-                Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>()
-                    .labeled(Vectorizer.LabelCoordinate.FIRST);
-
-                SVMLinearClassificationModel mdl = trainer.fit(ignite, dataCache, vectorizer);
-
-                System.out.println("\n>>> Exported SVM model: " + mdl);
-
-                double accuracy = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for exported SVM model: " + accuracy);
-
-                jsonMdlPath = Files.createTempFile(null, null);
-                mdl.toJSON(jsonMdlPath);
-
-                SVMLinearClassificationModel modelImportedFromJSON = SVMLinearClassificationModel.fromJSON(jsonMdlPath);
-
-                System.out.println("\n>>> Imported SVM model: " + modelImportedFromJSON);
-
-                accuracy = Evaluator.evaluate(
-                    dataCache,
-                    modelImportedFromJSON,
-                    vectorizer,
-                    MetricName.ACCURACY
-                );
-
-                System.out.println("\n>>> Accuracy for imported SVM model: " + accuracy);
-
-                System.out.println("\n>>> SVM Binary classification model over cache based dataset usage example completed.");
-            }
-            finally {
-                if (dataCache != null)
-                    dataCache.destroy();
-                if (jsonMdlPath != null)
-                    Files.deleteIfExists(jsonMdlPath);
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
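
The SVM example above delegates prediction to SVMLinearClassificationModel. For intuition, the standard linear decision
rule behind such a model is the sign of w.x + b; a standalone sketch follows, where the weights, intercept and feature
vector are invented values rather than parameters of any trained model:

public class LinearSvmDecisionSketch {
    /** Standard linear decision rule: predict by the sign of w.x + b. */
    static double predict(double[] w, double b, double[] x) {
        double score = b;
        for (int i = 0; i < w.length; i++)
            score += w[i] * x[i];
        return score >= 0 ? 1.0 : 0.0;
    }

    public static void main(String[] args) {
        double[] w = {0.8, -1.2, 0.3, 0.5};  // Hypothetical weights.
        double b = -0.1;                     // Hypothetical intercept.
        double[] x = {5.1, 3.5, 1.4, 0.2};   // Hypothetical observation.
        System.out.println("Predicted class: " + predict(w, b, x));
    }
}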
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
index d03bb96..3340ed9 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
@@ -34,7 +34,7 @@ import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.sparkmodelparser.SparkModelParser;
 import org.apache.ignite.ml.sparkmodelparser.SupportedSparkModels;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Run Decision Tree model loaded from snappy.parquet file. The snappy.parquet file was generated by Spark MLLib
@@ -69,7 +69,7 @@ public class DecisionTreeFromSparkExample {
 
                 final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 5, 6, 4).labeled(1);
 
-                DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
+                DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
                     SPARK_MDL_PATH,
                     SupportedSparkModels.DECISION_TREE,
                     env
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
index 5fd4461..9c36198 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
@@ -35,7 +35,7 @@ import org.apache.ignite.ml.math.primitives.vector.Vector;
 import org.apache.ignite.ml.sparkmodelparser.SparkModelParser;
 import org.apache.ignite.ml.sparkmodelparser.SupportedSparkModels;
 import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Run Decision tree regression model loaded from snappy.parquet file. The snappy.parquet file was generated by Spark
@@ -69,7 +69,7 @@ public class DecisionTreeRegressionFromSparkExample {
 
                 final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 1, 5, 6).labeled(4);
 
-                DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
+                DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
                     SPARK_MDL_PATH,
                     SupportedSparkModels.DECISION_TREE_REGRESSION,
                     env
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
index 233cb13..c24091c 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
@@ -31,7 +31,7 @@ import org.apache.ignite.ml.preprocessing.encoding.EncoderType;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Example that shows how to use String Encoder preprocessor to encode features presented as a strings.
@@ -73,7 +73,7 @@ public class EncoderExample {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     encoderPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
index 7270b03..d9482a5 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
@@ -32,7 +32,7 @@ import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Example that shows how to combine together two preprocessors: String Encoder preprocessor to encode features presented as a strings
@@ -80,7 +80,7 @@ public class EncoderExampleWithNormalization {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     normalizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
index 3547d7e..d97c49c 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
@@ -31,7 +31,7 @@ import org.apache.ignite.ml.preprocessing.encoding.EncoderType;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Example that shows how to use Label Encoder preprocessor to encode labels presented as a strings.
@@ -79,7 +79,7 @@ public class LabelEncoderExample {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     lbEncoderPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/TargetEncoderExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/TargetEncoderExample.java
deleted file mode 100644
index e3864b6..0000000
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/TargetEncoderExample.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.examples.ml.preprocessing.encoding;
-
-import java.io.FileNotFoundException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
-import org.apache.ignite.examples.ml.util.SandboxMLCache;
-import org.apache.ignite.ml.composition.ModelsComposition;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
-import org.apache.ignite.ml.composition.boosting.convergence.median.MedianOfMedianConvergenceCheckerFactory;
-import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
-import org.apache.ignite.ml.dataset.feature.extractor.impl.ObjectArrayVectorizer;
-import org.apache.ignite.ml.preprocessing.Preprocessor;
-import org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer;
-import org.apache.ignite.ml.preprocessing.encoding.EncoderType;
-import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
-import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
-import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
-
-/**
- * Example that shows how to use the Target Encoder preprocessor to encode categorical features with the mean target value.
- * <p>
- * Code in this example launches Ignite grid and fills the cache with test data (based on the Amazon Employee Access
- * Challenge dataset).</p>
- * <p>
- * After that it defines preprocessors that extract features from the upstream data and encode categorical features with
- * the average target value per category.</p>
- * <p>
- * Then, it trains the model based on the processed data using gradient boosting decision tree classification.</p>
- * <p>
- * Finally, this example uses {@link Evaluator} functionality to compute metrics from predictions.</p>
- *
- * <p>Daniele Micci-Barreca (2001). A Preprocessing Scheme for High-Cardinality Categorical
- * Attributes in Classification and Prediction Problems. SIGKDD Explor. Newsl. 3, 1.
- * From http://dx.doi.org/10.1145/507533.507538</p>
- */
-public class TargetEncoderExample {
-    /**
-     * Run example.
-     */
-    public static void main(String[] args) {
-        System.out.println();
-        System.out.println(">>> Train Gradient Boosting Decision Tree model on amazon-employee-access-challenge_train.csv dataset.");
-
-        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
-            try {
-                IgniteCache<Integer, Object[]> dataCache = new SandboxMLCache(ignite)
-                    .fillObjectCacheWithCategoricalData(MLSandboxDatasets.AMAZON_EMPLOYEE_ACCESS);
-
-                Set<Integer> featuresIndexies = new HashSet<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
-                Set<Integer> targetEncodedfeaturesIndexies = new HashSet<>(Arrays.asList(1, 5, 6));
-                Integer targetIndex = 0;
-
-                final Vectorizer<Integer, Object[], Integer, Object> vectorizer = new ObjectArrayVectorizer<Integer>(featuresIndexies.toArray(new Integer[0]))
-                    .labeled(targetIndex);
-
-                Preprocessor<Integer, Object[]> strEncoderPreprocessor = new EncoderTrainer<Integer, Object[]>()
-                    .withEncoderType(EncoderType.STRING_ENCODER)
-                    .withEncodedFeature(0)
-                    .withEncodedFeatures(featuresIndexies)
-                    .fit(ignite,
-                        dataCache,
-                        vectorizer
-                    );
-
-                Preprocessor<Integer, Object[]> targetEncoderProcessor = new EncoderTrainer<Integer, Object[]>()
-                    .withEncoderType(EncoderType.TARGET_ENCODER)
-                    .labeled(0)
-                    .withEncodedFeatures(targetEncodedfeaturesIndexies)
-                    .minSamplesLeaf(1)
-                    .minCategorySize(1L)
-                    .smoothing(1d)
-                    .fit(ignite,
-                        dataCache,
-                        strEncoderPreprocessor
-                    );
-
-                Preprocessor<Integer, Object[]> lbEncoderPreprocessor = new EncoderTrainer<Integer, Object[]>()
-                    .withEncoderType(EncoderType.LABEL_ENCODER)
-                    .fit(ignite,
-                        dataCache,
-                        targetEncoderProcessor
-                    );
-
-                GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(0.5, 500, 4, 0.)
-                    .withCheckConvergenceStgyFactory(new MedianOfMedianConvergenceCheckerFactory(0.1));
-
-                // Train model.
-                ModelsComposition mdl = trainer.fit(
-                    ignite,
-                    dataCache,
-                    lbEncoderPreprocessor
-                );
-
-                System.out.println("\n>>> Trained model: " + mdl);
-
-                double accuracy = Evaluator.evaluate(
-                    dataCache,
-                    mdl,
-                    lbEncoderPreprocessor,
-                    new Accuracy()
-                );
-
-                System.out.println("\n>>> Accuracy " + accuracy);
-                System.out.println("\n>>> Test Error " + (1 - accuracy));
-
-                System.out.println(">>> Train Gradient Boosting Decision Tree model on amazon-employee-access-challenge_train.csv dataset.");
-
-            }
-            catch (FileNotFoundException e) {
-                e.printStackTrace();
-            }
-        }
-        finally {
-            System.out.flush();
-        }
-    }
-}
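
The Javadoc of the removed TargetEncoderExample describes smoothed mean-target encoding only in prose. Purely for reference, a minimal standalone sketch of one common smoothing scheme, (sum of targets + smoothing * global mean) / (count + smoothing), is given below; the class and method names are illustrative assumptions and this is not the exact formula or API used by Ignite's EncoderTrainer.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class TargetMeanEncodingSketch {
        /** Encodes each category as (sum of targets + smoothing * globalMean) / (count + smoothing). */
        static Map<String, Double> encode(List<String> categories, List<Double> targets, double smoothing) {
            Map<String, double[]> stats = new HashMap<>(); // category -> {sum, count}
            double globalSum = 0.0;
            for (int i = 0; i < categories.size(); i++) {
                double[] s = stats.computeIfAbsent(categories.get(i), k -> new double[2]);
                s[0] += targets.get(i);
                s[1] += 1.0;
                globalSum += targets.get(i);
            }
            double globalMean = globalSum / targets.size();
            Map<String, Double> encoded = new HashMap<>();
            for (Map.Entry<String, double[]> e : stats.entrySet())
                encoded.put(e.getKey(), (e.getValue()[0] + smoothing * globalMean) / (e.getValue()[1] + smoothing));
            return encoded;
        }

        public static void main(String[] args) {
            // Category "a" has targets {1, 0}, category "b" has target {1}; the global mean is 2/3.
            System.out.println(encode(Arrays.asList("a", "a", "b"), Arrays.asList(1.0, 0.0, 1.0), 1.0));
        }
    }
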
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
index c572d81..511eb05 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
@@ -105,7 +105,7 @@ public class BostonHousePricesPredictionExample {
     private static String toString(LinearRegressionModel mdl) {
         BiFunction<Integer, Double, String> formatter = (idx, val) -> String.format("%.2f*f%d", val, idx);
 
-        Vector weights = mdl.weights();
+        Vector weights = mdl.getWeights();
         StringBuilder sb = new StringBuilder(formatter.apply(0, weights.get(0)));
 
         for (int fid = 1; fid < weights.size(); fid++) {
@@ -114,7 +114,7 @@ public class BostonHousePricesPredictionExample {
                 .append(formatter.apply(fid, Math.abs(w)));
         }
 
-        double intercept = mdl.intercept();
+        double intercept = mdl.getIntercept();
         sb.append(" ").append(intercept > 0 ? "+" : "-").append(" ")
             .append(String.format("%.2f", Math.abs(intercept)));
         return sb.toString();
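
The hunk above restores the getWeights()/getIntercept() accessors used to render a LinearRegressionModel as a readable formula. The same rendering logic is restated below as a self-contained sketch over a plain double array; the class and method names are illustrative assumptions, not part of the Ignite API.

    public class LinearModelFormatSketch {
        /** Formats weights and intercept as "w0*f0 +/- |w1|*f1 ... +/- |intercept|". */
        static String format(double[] weights, double intercept) {
            StringBuilder sb = new StringBuilder(String.format("%.2f*f%d", weights[0], 0));
            for (int i = 1; i < weights.length; i++)
                sb.append(weights[i] > 0 ? " + " : " - ")
                    .append(String.format("%.2f*f%d", Math.abs(weights[i]), i));
            sb.append(" ").append(intercept > 0 ? "+" : "-").append(" ")
                .append(String.format("%.2f", Math.abs(intercept)));
            return sb.toString();
        }

        public static void main(String[] args) {
            // Prints e.g. "1.50*f0 - 0.25*f1 + 3.00" in an English locale.
            System.out.println(format(new double[] {1.5, -0.25}, 3.0));
        }
    }
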
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
index 93dc051..e6a4461 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
@@ -30,7 +30,7 @@ import org.apache.ignite.ml.selection.cv.CrossValidation;
 import org.apache.ignite.ml.selection.scoring.metric.MetricName;
 import org.apache.ignite.ml.structures.LabeledVector;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Run <a href="https://en.wikipedia.org/wiki/Decision_tree">decision tree</a> classification with
@@ -75,7 +75,7 @@ public class CrossValidationExample {
 
                 LabeledDummyVectorizer<Integer, Double> vectorizer = new LabeledDummyVectorizer<>();
 
-                CrossValidation<DecisionTreeModel, Integer, LabeledVector<Double>> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, LabeledVector<Double>> scoreCalculator
                     = new CrossValidation<>();
 
                 double[] accuracyScores = scoreCalculator
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
index 68058b7..543e211 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
@@ -30,7 +30,7 @@ import org.apache.ignite.ml.inference.IgniteModelStorageUtil;
 import org.apache.ignite.ml.sql.SQLFunctions;
 import org.apache.ignite.ml.sql.SqlDatasetBuilder;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 import static org.apache.ignite.examples.ml.sql.DecisionTreeClassificationTrainerSQLTableExample.loadTitanicDatasets;
 
@@ -101,7 +101,7 @@ public class DecisionTreeClassificationTrainerSQLInferenceExample {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
 
                 System.out.println(">>> Perform training...");
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
                     new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
                         .withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
index d05d1a9..083608e 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
@@ -34,7 +34,7 @@ import org.apache.ignite.ml.math.primitives.vector.Vector;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.apache.ignite.ml.sql.SqlDatasetBuilder;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Example of using distributed {@link DecisionTreeClassificationTrainer} on data stored in a SQL table.
@@ -101,7 +101,7 @@ public class DecisionTreeClassificationTrainerSQLTableExample {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
 
                 System.out.println(">>> Perform training...");
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
                     new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
                         .withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
index b1cf23e..600f4a5 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
@@ -28,7 +28,7 @@ import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorize
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.apache.ignite.ml.structures.LabeledVector;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Example of using distributed {@link DecisionTreeClassificationTrainer}.
@@ -75,7 +75,7 @@ public class DecisionTreeClassificationTrainerExample {
 
                 // Train decision tree model.
                 LabeledDummyVectorizer<Integer, Double> vectorizer = new LabeledDummyVectorizer<>();
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     trainingSet,
                     vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
index 5cfb828..1a19771 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
@@ -25,7 +25,7 @@ import org.apache.ignite.configuration.CacheConfiguration;
 import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
 import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 import org.apache.ignite.ml.tree.DecisionTreeRegressionTrainer;
 
 /**
@@ -70,7 +70,7 @@ public class DecisionTreeRegressionTrainerExample {
                 DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(10, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
+                DecisionTreeNode mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
 
                 System.out.println(">>> Decision tree regression model: " + mdl);
 
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
index 7e6c5d3..a2eaf47 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
@@ -22,12 +22,12 @@ import org.apache.ignite.IgniteCache;
 import org.apache.ignite.Ignition;
 import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
 import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.boosting.GDBModel;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
+import org.apache.ignite.ml.composition.ModelsComposition;
 import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
 import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
 import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.trainers.DatasetTrainer;
 import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
 import org.jetbrains.annotations.NotNull;
 
@@ -58,11 +58,11 @@ public class GDBOnTreesClassificationTrainerExample {
                 trainingSet = fillTrainingData(ignite, trainingSetCfg);
 
                 // Create classification trainer.
-                GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
+                DatasetTrainer<ModelsComposition, Double> trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
                     .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1));
 
                 // Train decision tree model.
-                GDBModel mdl = trainer.fit(
+                ModelsComposition mdl = trainer.fit(
                     ignite,
                     trainingSet,
                     new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST)
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
index a6ea135..09dd708 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
@@ -22,12 +22,14 @@ import org.apache.ignite.IgniteCache;
 import org.apache.ignite.Ignition;
 import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
 import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.boosting.GDBModel;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
+import org.apache.ignite.ml.composition.ModelsComposition;
 import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
 import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
 import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
+import org.apache.ignite.ml.inference.Model;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
 import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.trainers.DatasetTrainer;
 import org.apache.ignite.ml.tree.boosting.GDBRegressionOnTreesTrainer;
 import org.jetbrains.annotations.NotNull;
 
@@ -58,11 +60,11 @@ public class GDBOnTreesRegressionTrainerExample {
                 trainingSet = fillTrainingData(ignite, trainingSetCfg);
 
                 // Create regression trainer.
-                GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
+                DatasetTrainer<ModelsComposition, Double> trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
                     .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.001));
 
                 // Train decision tree model.
-                GDBModel mdl = trainer.fit(
+                Model<Vector, Double> mdl = trainer.fit(
                     ignite,
                     trainingSet,
                     new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST)
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
index b8e1d00..b9006f5 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
@@ -21,8 +21,7 @@ import java.io.FileNotFoundException;
 import org.apache.ignite.Ignite;
 import org.apache.ignite.IgniteCache;
 import org.apache.ignite.Ignition;
-import org.apache.ignite.ml.composition.boosting.GDBModel;
-import org.apache.ignite.ml.composition.boosting.GDBTrainer;
+import org.apache.ignite.ml.composition.ModelsComposition;
 import org.apache.ignite.ml.composition.boosting.convergence.median.MedianOfMedianConvergenceCheckerFactory;
 import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
 import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
@@ -37,6 +36,7 @@ import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.MetricName;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
+import org.apache.ignite.ml.trainers.DatasetTrainer;
 import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
 
 /**
@@ -102,11 +102,11 @@ public class Step_11_Boosting {
                     );
 
                 // Create classification trainer.
-                GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(0.5, 500, 4, 0.)
+                DatasetTrainer<ModelsComposition, Double> trainer = new GDBBinaryClassifierOnTreesTrainer(0.5, 500, 4, 0.)
                     .withCheckConvergenceStgyFactory(new MedianOfMedianConvergenceCheckerFactory(0.1));
 
                 // Train decision tree model.
-                GDBModel mdl = trainer.fit(
+                ModelsComposition mdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
index 97ccb58..b6df5d6 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
@@ -27,7 +27,7 @@ import org.apache.ignite.ml.math.primitives.vector.Vector;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Usage of {@link DecisionTreeClassificationTrainer} to predict death in the disaster.
@@ -56,7 +56,7 @@ public class Step_1_Read_and_Learn {
 
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
index a020dbe..094a966 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
@@ -29,7 +29,7 @@ import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Usage of {@link ImputerTrainer} to fill missing data ({@code Double.NaN}) values in the chosen columns.
@@ -66,7 +66,7 @@ public class Step_2_Imputing {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
index c97ee38..68b05a4 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
@@ -31,7 +31,7 @@ import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Let's add two categorical features "sex", "embarked" to predict more precisely than in {@link Step_1_Read_and_Learn}.
@@ -80,7 +80,7 @@ public class Step_3_Categorial {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
index 1355979..206d2dc 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
@@ -31,7 +31,7 @@ import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Let's add two categorical features "sex", "embarked" to predict more precisely than in {@link
@@ -83,7 +83,7 @@ public class Step_3_Categorial_with_One_Hot_Encoder {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
index f4763a1..1d85a14 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
@@ -31,7 +31,7 @@ import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * Add two more numerical features "age", "fare" to improve our model over {@link Step_3_Categorial}.
@@ -79,7 +79,7 @@ public class Step_4_Add_age_fare {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
index 05d0137..dfb6de0 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
@@ -33,7 +33,7 @@ import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer;
 import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
 import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * {@link MinMaxScalerTrainer} and {@link NormalizationTrainer} are used in this example due to different values
@@ -97,7 +97,7 @@ public class Step_5_Scaling {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     normalizationPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
index a60a8ba..e104c51 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
@@ -35,7 +35,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * The highest accuracy in the previous example ({@link Step_6_KNN}) is the result of
@@ -103,7 +103,7 @@ public class Step_7_Split_train_test {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel mdl = trainer.fit(
+                DecisionTreeNode mdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
index 20f4a72..0da797d 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
@@ -38,7 +38,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation will be used in this example.
@@ -126,7 +126,7 @@ public class Step_8_CV {
                         DecisionTreeClassificationTrainer trainer
                             = new DecisionTreeClassificationTrainer(maxDeep, 0);
 
-                        CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                        CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                             = new CrossValidation<>();
 
                         double[] scores = scoreCalculator
@@ -167,7 +167,7 @@ public class Step_8_CV {
                 DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(bestMaxDeep, 0);
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
index 963e1b7..5b62714 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
@@ -40,7 +40,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -119,7 +119,7 @@ public class Step_8_CV_with_Param_Grid {
 
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -156,7 +156,7 @@ public class Step_8_CV_with_Param_Grid {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
index 1aa2d57..6be8496 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
@@ -36,7 +36,7 @@ import org.apache.ignite.ml.selection.scoring.metric.MetricName;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -91,7 +91,7 @@ public class Step_8_CV_with_Param_Grid_and_pipeline {
 
                 // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
index c489fc9..d7e2f27 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
@@ -42,7 +42,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -123,7 +123,7 @@ public class Step_13_RandomSearch {
 
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -166,7 +166,7 @@ public class Step_13_RandomSearch {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
index b63bf96..017f123 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
@@ -45,7 +45,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -126,7 +126,7 @@ public class Step_14_Parallel_Brute_Force_Search {
 
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -168,7 +168,7 @@ public class Step_14_Parallel_Brute_Force_Search {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
index ac6c1eb..3a3e9e8 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
@@ -45,7 +45,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -125,7 +125,7 @@ public class Step_15_Parallel_Random_Search {
                 // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -171,7 +171,7 @@ public class Step_15_Parallel_Random_Search {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
index 408eb48..bee51e4 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
@@ -42,7 +42,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -123,7 +123,7 @@ public class Step_16_Genetic_Programming_Search {
 
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -162,7 +162,7 @@ public class Step_16_Genetic_Programming_Search {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
index a9d39bd..34a8158 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
@@ -45,7 +45,7 @@ import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
 import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
 import org.apache.ignite.ml.selection.split.TrainTestSplit;
 import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeNode;
 
 /**
  * To choose the best hyper-parameters, cross-validation with {@link ParamGrid} will be used in this example.
@@ -126,7 +126,7 @@ public class Step_17_Parallel_Genetic_Programming_Search {
 
                 DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
 
-                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
+                CrossValidation<DecisionTreeNode, Integer, Vector> scoreCalculator
                     = new CrossValidation<>();
 
                 ParamGrid paramGrid = new ParamGrid()
@@ -168,7 +168,7 @@ public class Step_17_Parallel_Genetic_Programming_Search {
                     -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
 
                 // Train decision tree model.
-                DecisionTreeModel bestMdl = trainer.fit(
+                DecisionTreeNode bestMdl = trainer.fit(
                     ignite,
                     dataCache,
                     split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/util/MLSandboxDatasets.java b/examples/src/main/java/org/apache/ignite/examples/ml/util/MLSandboxDatasets.java
index 9f70659..7021e7d 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/util/MLSandboxDatasets.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/util/MLSandboxDatasets.java
@@ -68,10 +68,7 @@ public enum MLSandboxDatasets {
     MIXED_DATASET("examples/src/main/resources/datasets/mixed_dataset.csv", true, ","),
 
     /** A dataset with categorical features and labels. */
-    MUSHROOMS("examples/src/main/resources/datasets/mushrooms.csv", true, ","),
-
-    /** A dataset with categorical features and labels. */
-    AMAZON_EMPLOYEE_ACCESS("examples/src/main/resources/datasets/amazon-employee-access-challenge_train.csv", true, ",");
+    MUSHROOMS("examples/src/main/resources/datasets/mushrooms.csv", true, ",");
 
     /** Filename. */
     private final String filename;
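
The hunk above trims the dataset registry back to the MUSHROOMS entry; each enum constant pairs a CSV path with a has-header flag and a field separator. A minimal self-contained sketch of that registry pattern follows; only the filename field is confirmed by the context lines, so the remaining field names and accessors are assumptions for illustration.

    public enum DatasetRegistrySketch {
        MUSHROOMS("examples/src/main/resources/datasets/mushrooms.csv", true, ",");

        /** Filename. */
        private final String filename;

        /** Whether the first CSV row is a header (assumed meaning of the boolean argument). */
        private final boolean hasHeader;

        /** Field separator (assumed meaning of the third argument). */
        private final String separator;

        DatasetRegistrySketch(String filename, boolean hasHeader, String separator) {
            this.filename = filename;
            this.hasHeader = hasHeader;
            this.separator = separator;
        }

        public String filename() { return filename; }
        public boolean hasHeader() { return hasHeader; }
        public String separator() { return separator; }

        public static void main(String[] args) {
            System.out.println(MUSHROOMS.filename() + " (header=" + MUSHROOMS.hasHeader() + ")");
        }
    }
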
diff --git a/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv b/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv
deleted file mode 100644
index b5c34f5..0000000
--- a/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-0.9928904609329371
-0.9963369818846654
-0.9775200762137463
-0.9491935983699706
diff --git a/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample.csv b/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample.csv
deleted file mode 100644
index ba86b87..0000000
--- a/examples/src/main/resources/datasets/amazon-employee-access-challenge-sample.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-RESOURCE,MGR_ID,ROLE_ROLLUP_1,ROLE_ROLLUP_2,ROLE_DEPTNAME,ROLE_TITLE,ROLE_FAMILY_DESC,ROLE_FAMILY,ROLE_CODE
-39353,85475,117961,118300,123472,117905,117906,290919,117908
-17183,1540,117961,118343,123125,118536,118536,308574,118539
-36724,14457,118219,118220,117884,117879,267952,19721,117880
-36135,5396,117961,118343,119993,118321,240983,290919,118322
diff --git a/examples/src/main/resources/datasets/amazon-employee-access-challenge_train.csv b/examples/src/main/resources/datasets/amazon-employee-access-challenge_train.csv
deleted file mode 100644
index ec68fef..0000000
--- a/examples/src/main/resources/datasets/amazon-employee-access-challenge_train.csv
+++ /dev/null
@@ -1,100 +0,0 @@
-ACTION,RESOURCE,MGR_ID,ROLE_ROLLUP_1,ROLE_ROLLUP_2,ROLE_DEPTNAME,ROLE_TITLE,ROLE_FAMILY_DESC,ROLE_FAMILY,ROLE_CODE
-1,39353,85475,117961,118300,123472,117905,117906,290919,117908
-1,17183,1540,117961,118343,123125,118536,118536,308574,118539
-1,36724,14457,118219,118220,117884,117879,267952,19721,117880
-1,36135,5396,117961,118343,119993,118321,240983,290919,118322
-1,42680,5905,117929,117930,119569,119323,123932,19793,119325
-0,45333,14561,117951,117952,118008,118568,118568,19721,118570
-1,25993,17227,117961,118343,123476,118980,301534,118295,118982
-1,19666,4209,117961,117969,118910,126820,269034,118638,126822
-1,31246,783,117961,118413,120584,128230,302830,4673,128231
-1,78766,56683,118079,118080,117878,117879,304519,19721,117880
-1,4675,3005,117961,118413,118481,118784,117906,290919,118786
-1,15030,94005,117902,118041,119238,119093,138522,119095,119096
-1,79954,46608,118315,118463,122636,120773,123148,118960,120774
-1,4675,50997,91261,118026,118202,119962,168365,118205,119964
-1,95836,18181,117961,118343,118514,118321,117906,290919,118322
-1,19484,6657,118219,118220,118221,117885,117886,117887,117888
-1,114267,23136,117961,118052,119742,118321,117906,290919,118322
-1,35197,57715,117961,118446,118701,118702,118703,118704,118705
-1,86316,7002,117961,118343,123125,118278,132715,290919,118279
-1,27785,5636,117961,118413,122007,118321,117906,290919,118322
-1,37427,5220,117961,118300,118458,120006,303717,118424,120008
-1,15672,111936,117961,118300,118783,117905,240983,290919,117908
-1,92885,744,117961,118300,119181,118777,279443,308574,118779
-1,1020,85475,117961,118300,120410,118321,117906,290919,118322
-1,4675,7551,117961,118052,118867,118259,117906,290919,118261
-1,41334,28253,118315,118463,123089,118259,128796,290919,118261
-1,77385,14829,117961,118052,119986,117905,117906,290919,117908
-1,20273,11506,118216,118587,118846,179731,128361,117887,117973
-1,78098,46556,118090,118091,117884,118568,165015,19721,118570
-1,79328,4219,117961,118300,120312,120313,144958,118424,120315
-1,23921,4953,117961,118343,119598,120344,310997,118424,120346
-1,34687,815,117961,118300,123719,117905,117906,290919,117908
-1,43452,169112,117902,118041,119781,118563,121024,270488,118565
-1,33248,4929,117961,118300,118825,118826,226343,118424,118828
-1,78282,7445,117961,118343,122299,118054,121350,117887,118055
-1,17183,794,118752,119070,117945,280788,152940,292795,119082
-1,38658,1912,119134,119135,118042,120097,174445,270488,120099
-1,14354,50368,117926,118266,117884,118568,281735,19721,118570
-1,45019,1080,117961,118327,118378,120952,120953,118453,120954
-1,13878,1541,117961,118225,123173,120812,123174,118638,120814
-1,14570,46805,117929,117930,117920,118568,281735,19721,118570
-0,74310,49521,117961,118300,118301,119849,235245,118638,119851
-1,6977,1398,117961,118300,120722,118784,130735,290919,118786
-1,31613,5899,117961,118327,120318,118777,296252,308574,118779
-1,1020,21127,117961,118052,119408,118777,279443,308574,118779
-1,32270,3887,117961,118343,120347,120348,265969,118295,120350
-1,19629,19645,117961,118413,118481,118784,240983,290919,118786
-1,15702,1938,117961,118300,118066,120560,304465,118643,120562
-1,113037,5396,117961,118343,119993,120773,118959,118960,120774
-1,20279,17695,117890,117891,117878,117879,117879,19721,117880
-1,80746,16690,117961,118446,119064,122022,131302,119221,122024
-1,80263,36145,117961,118052,120304,307024,311622,118331,118332
-1,73753,70062,117961,118386,118746,117905,117906,290919,117908
-1,39883,7551,117961,118052,118867,117905,172635,290919,117908
-1,25993,7023,117961,117962,119223,118259,118260,290919,118261
-0,78106,50613,117916,118150,118810,118568,159905,19721,118570
-1,33150,1915,117961,118300,119181,118784,117906,290919,118786
-1,34817,5899,117961,118327,120318,118641,240982,118643,118644
-1,28354,3860,117961,118446,120317,118321,117906,290919,118322
-1,33642,13196,117951,117952,117941,117879,117897,19721,117880
-1,26430,56310,118212,118580,117895,117896,117913,117887,117898
-1,28149,50120,91261,118026,119507,118321,117906,290919,118322
-1,40867,6736,117961,117969,6725,122290,268766,6725,122292
-1,20293,273476,117926,118266,117920,118568,310732,19721,118570
-1,36020,2163,118219,118220,120694,118777,130218,308574,118779
-1,60006,16821,117961,118225,120535,118396,269406,118398,118399
-0,35043,14800,117961,117962,118352,118784,117906,290919,118786
-1,17308,4088,117961,118300,118458,118728,223125,118295,118730
-0,15716,18073,118256,118257,118623,118995,286106,292795,118997
-1,39883,55956,118555,118178,119262,117946,119727,292795,117948
-1,42031,88387,118315,118463,118522,119172,121927,118467,119174
-1,27124,2318,117961,118327,118933,117905,117906,290919,117908
-1,35498,18454,117961,118343,119598,125171,257115,118424,125173
-1,79168,58465,118602,118603,117941,117885,119621,117887,117888
-1,2252,782,117961,118413,127522,118784,240983,290919,118786
-1,45652,7338,117961,118225,119924,118321,118448,290919,118322
-1,23921,4145,117961,118300,120026,307024,303717,118331,118332
-1,95247,50690,118269,118270,117878,118568,118568,19721,118570
-1,78844,15645,117961,118052,122392,128903,160695,292795,128905
-1,19481,10627,118106,118107,119565,179731,155780,117887,117973
-1,18380,44022,117961,117962,122215,127782,130085,290919,127783
-1,37734,58406,117975,117976,117884,117885,117913,117887,117888
-1,3853,17550,117961,118446,118684,118321,117906,290919,118322
-1,278393,7076,117961,118225,120323,119093,136840,119095,119096
-1,35625,6454,117961,118343,118856,117905,240983,290919,117908
-1,35066,17465,91261,118026,118202,118278,118260,290919,118279
-1,3853,5043,117961,118300,118458,120006,310997,118424,120008
-1,41569,16671,117961,118052,118706,118523,310608,118331,118525
-1,25862,46224,117961,118327,118378,120952,143223,118453,120954
-1,75078,45963,117961,118386,118896,122645,309858,119221,122647
-1,1020,1483,117961,117962,118840,118641,306399,118643,118644
-0,22956,3967,117961,118052,118706,118321,117906,290919,118322
-1,20364,2612,117961,118386,123901,117905,117906,290919,117908
-1,28943,7547,117961,118052,118933,118784,213944,290919,118786
-1,75329,17414,118752,119070,118042,118043,151099,270488,118046
-1,41569,70066,91261,118026,118202,117905,117906,290919,117908
-1,4684,50806,117961,118446,119961,118259,118260,290919,118261
-1,77943,4478,117961,118386,118692,118321,117906,290919,118322
-1,38860,15541,118573,118574,118556,280788,127423,292795,119082
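
Each row of the removed training file above carries the binary ACTION label in the first column followed by nine categorical ID columns. A hedged sketch of splitting one such row into a label and a categorical feature array is shown below; the examples actually load this data through SandboxMLCache, so this standalone parser is illustrative only.

    import java.util.Arrays;

    public class CsvRowSketch {
        public static void main(String[] args) {
            // First data row of the removed amazon-employee-access-challenge_train.csv.
            String row = "1,39353,85475,117961,118300,123472,117905,117906,290919,117908";
            String[] cols = row.split(",");
            double label = Double.parseDouble(cols[0]);                    // ACTION
            String[] features = Arrays.copyOfRange(cols, 1, cols.length);  // RESOURCE .. ROLE_CODE
            System.out.println("label=" + label + ", features=" + Arrays.toString(features));
        }
    }
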
diff --git a/examples/src/main/resources/datasets/boston_housing_dataset-catboost-expected-results.txt b/examples/src/main/resources/datasets/boston_housing_dataset-catboost-expected-results.txt
deleted file mode 100644
index 70bd450..0000000
--- a/examples/src/main/resources/datasets/boston_housing_dataset-catboost-expected-results.txt
+++ /dev/null
@@ -1,505 +0,0 @@
-21.164552741740483
-34.44455359262485
-33.94734205787078
-35.358021389142024
-28.217148379945492
-21.979623483476228
-25.179151087820795
-16.663784000265505
-18.6332288900902
-16.057363242333274
-19.620020829117497
-21.425096156547266
-20.18953001360293
-18.541744236595534
-19.905009893658633
-22.817998611044295
-17.503116976482364
-19.602061128432926
-18.714094616880438
-13.043964078144153
-17.95103957809092
-16.11765782736545
-14.64413663576813
-16.185286414626507
-14.496079185487954
-15.881579761736644
-15.048892781075
-18.215320269522508
-20.433786993263364
-12.775459494813123
-15.38727503366468
-13.40734400102275
-13.593646293177692
-13.557122556169663
-20.283611241930156
-20.293302348210815
-21.318776069286322
-23.952502661770687
-30.344785709705167
-35.05826914648418
-27.20049305979657
-24.61178278078263
-24.618991512565287
-21.935606702480257
-20.0536180503364
-20.06581000678366
-17.215167736904057
-14.710548808235021
-19.00364327349821
-20.082422388118232
-20.736569614291824
-25.80355134178083
-22.525808056631174
-18.730933044239823
-35.60300729810213
-24.361817631604794
-31.72993595445449
-23.198333216803814
-19.85002226443056
-18.285418055538244
-16.75095037452558
-22.464570879312504
-24.679784812215896
-32.86400124477411
-24.05478101103431
-19.348380567799698
-20.999621681886243
-18.132191466275003
-20.832009749075805
-24.088971315686862
-21.813085306481824
-22.894053567336112
-23.379104331575054
-24.33053821245845
-22.152640952593423
-20.411872025744643
-21.408623510845107
-20.810299700032573
-20.70531852050972
-27.310483630842196
-23.94836472063911
-24.25239921382196
-22.649113302157275
-23.18983802717364
-26.379810372978937
-21.403543045498985
-23.041037015322697
-24.642863977405163
-29.241700734347805
-22.93261682207133
-22.172194719762174
-23.704440957027128
-25.15136140666791
-20.663894656862876
-27.853780131480843
-21.92962322037711
-38.82011666216642
-43.89667059543811
-32.91042869715988
-26.045561593459762
-26.110476484368686
-18.880196699725307
-19.960637302429078
-20.242191052597644
-18.881321953619217
-18.99356014672636
-20.192418865632042
-20.196003721285983
-19.188430635701017
-22.00423135469518
-22.930827665114776
-18.93940211813681
-18.947288931081346
-19.774110600032568
-18.220372489208046
-21.30392472528196
-19.752774329500976
-19.504171941732174
-19.698920034030287
-22.225853149605744
-20.688122106679526
-20.039462328714063
-16.925492403924604
-18.99852203470001
-21.254047505796265
-16.01625259336251
-15.94864028248406
-17.48642470955056
-15.28896627644686
-19.396660296420013
-19.782549243746097
-21.812708812078647
-17.710152224835046
-15.413100325589951
-17.70340364100517
-17.384525367273696
-17.907392701793928
-13.607239056111437
-16.847521564783612
-14.499023063866698
-14.265860573929002
-14.03589509226626
-15.019842627560848
-12.663891023766888
-14.131570716919317
-16.2461219598252
-14.298816599091582
-16.637092245956154
-15.604086293154985
-20.702527127474973
-19.46740940424256
-15.985108607193087
-18.245044278822125
-17.300417921712594
-15.478312040288447
-13.726513847035916
-40.46153474218855
-24.729832310129535
-23.958445104246024
-27.167020538774082
-49.81376987494329
-49.763899610387476
-49.740265263888446
-21.953542778261728
-24.566058286243084
-50.56979388715128
-22.787064866885373
-22.996984087726574
-22.575360458961253
-18.829906022042753
-19.95614299480929
-22.489287237090124
-24.31344455301194
-22.853874878872183
-29.231831811224854
-22.8922437114595
-24.120043903186193
-29.23591807260737
-37.73932876923115
-40.324333387355985
-34.67045200524914
-37.583641702156974
-32.247707746472045
-26.258909528694033
-29.340846958558156
-49.12469209055186
-31.02349742520599
-30.187382160082088
-35.02371794277118
-35.646983341238524
-30.553148093421097
-36.941046577918655
-30.593813975152443
-29.054323721705735
-49.615315903587565
-34.353585404168356
-30.36973215187362
-33.77575025416485
-33.991299512614276
-33.230401227171384
-23.713437729224786
-42.564342273096344
-48.79360052051148
-49.61205410881091
-23.08941526105016
-23.666777788950462
-21.66200393617301
-24.0390258095901
-19.488259028903943
-21.43819058183768
-19.49823133809392
-22.718210403212506
-27.154130376929203
-23.50554318162597
-25.08489891352236
-22.86599092502905
-27.047978614874694
-21.930162059226504
-23.554578852162642
-27.29640542044021
-21.505124097374143
-27.533662049839982
-27.86430902213703
-45.209006701495
-48.87051996243624
-38.46129207721255
-31.53450081366175
-46.364673518296854
-31.64997970969701
-23.49765869642234
-32.28680389245416
-42.04361549831056
-47.54834973862798
-28.575148034368123
-23.688344788258444
-25.37476714203794
-32.425419627828845
-24.347614887508044
-24.50129050865779
-22.798639215956133
-20.033968936438214
-21.386284275663467
-23.795449412147377
-17.45414151144564
-18.580643011708247
-23.377981149376673
-20.53341228602143
-23.52185945551625
-26.170461500376256
-24.362040264816457
-25.300382075623922
-29.775991729691
-42.55303137829672
-22.035104870392306
-20.563120544406843
-43.535566665229666
-49.77398640688496
-36.143807589723615
-30.632534298225195
-34.47606813686944
-42.53603143850725
-48.95898311562212
-31.471207216881954
-36.19925373952015
-22.85960349136891
-30.300818284084684
-49.75406749441813
-43.6129177696169
-21.327438277212263
-20.710391564187564
-25.36250083294143
-24.762201014336746
-35.71769761604902
-32.33753165907187
-31.913203988376274
-33.25328392022616
-32.73407024554032
-27.917981706763676
-34.878131976046504
-45.70427124038703
-35.71065315134212
-45.8587657829742
-50.187239973555606
-31.841635005997325
-22.501652910867552
-20.063893302618112
-23.257681189466947
-22.56026584874239
-24.250803257188185
-29.69251939059277
-36.19973924039158
-27.992073514249764
-24.32486806953272
-21.84106052875242
-28.378832199186164
-26.422031489981613
-19.959059689045365
-22.679639079970237
-29.656162409051543
-25.33847765530765
-23.282054947829984
-25.566786207892285
-33.26997806384548
-35.62200679627758
-28.17816316825635
-33.730764176133064
-28.46297981036522
-23.91387745908587
-20.31472216983363
-16.899493917810325
-22.65231504224782
-18.984427852082455
-21.755350311707566
-23.361440743987306
-17.624085959832733
-17.941534685477443
-19.052289922834408
-22.556847572697198
-21.307587876227466
-23.80074402757554
-23.276569706109512
-21.1864449651466
-18.461470447811372
-24.088155499568764
-25.40645329433642
-23.746955009726367
-21.580417567916797
-19.36526836266955
-22.618174026483366
-20.222332662324682
-17.5401189541411
-19.7325917476802
-22.25366785871715
-21.09161841102577
-20.897489003359453
-18.974567501413663
-18.414270972726804
-20.437094838240856
-19.665255050964582
-19.498610552709884
-32.4347320626257
-17.188042095340236
-24.520317088589213
-30.848528359497863
-17.962569662453305
-17.593474675697106
-23.311048681685914
-24.887093655576127
-26.338759207592332
-23.10634435908665
-23.746558541675498
-19.11156329389437
-30.276577355747925
-18.131565994922134
-20.466981614454447
-17.557677174097133
-21.416144608946404
-22.42945458898263
-22.25725888336469
-24.741343605559717
-19.234306772123688
-21.502699798934817
-17.844222132970824
-22.193450303574235
-27.522943412034657
-20.734608359385405
-23.22083675952978
-49.7268756453758
-50.316387345329446
-49.83714473296656
-49.17688834360419
-49.77567522132131
-14.182533585805587
-13.384975617772794
-15.371679422004473
-13.891899259189174
-13.078399336852993
-11.689877320993613
-10.40629866815365
-10.590988449775296
-11.187511710856114
-10.699404391438337
-11.299016068261464
-8.948054514582488
-7.6009282232279265
-9.812052746463724
-7.6331647802079825
-9.990588038608827
-11.308808817142909
-14.673930682603622
-21.155687492192005
-10.2551456581643
-14.460236437489163
-12.313406691919944
-13.2493083046598
-12.582734504218593
-10.197401747365666
-5.208812701405201
-7.017469485197408
-6.031846616563065
-8.71441499272153
-11.95215600950543
-8.410353928522795
-8.327592690998838
-5.439373354800356
-12.394442232227915
-27.66659948948724
-16.838093968757427
-26.208693358875212
-15.486109129210284
-17.564240425969317
-17.23959811092
-16.04148834711414
-7.424796860641504
-7.574708004733665
-8.374715347661533
-10.099487093475474
-8.117528752079668
-8.895503368583991
-16.15593811973578
-14.918601374011807
-19.823062668655528
-13.176943012611968
-11.787692311721155
-8.27071448325512
-10.83272368748242
-10.546354866229864
-10.822758452077135
-9.632229397417156
-14.283801758103968
-14.622417750027324
-16.63389720007538
-14.252782253498086
-12.106427930309948
-13.17368952190395
-10.382466517837265
-8.593917899919186
-8.341010543309778
-12.358234984129313
-10.437531194460895
-15.920114960755544
-17.156717190383212
-15.535071003939581
-10.90063547746989
-10.820305432589691
-14.921240356012788
-13.867640472748148
-14.20031078315193
-13.562450456065102
-13.174301057770835
-15.516887774877546
-16.6214238533029
-17.856618031639137
-13.995438919234763
-13.9918492639568
-13.462096502177403
-13.782174549809977
-15.81476545696465
-19.23055474486078
-16.721742536354455
-18.371669151125573
-19.54678404584893
-20.962653748742536
-21.92505449677977
-20.150160651727457
-18.01599065887942
-17.385312783339558
-18.75484442216188
-19.78934290291277
-19.34178188165078
-20.192165880962822
-21.571657564965964
-29.102871141380515
-14.452906705647992
-13.676531786057728
-17.078332183275275
-12.139745070856558
-15.627002831948143
-21.074845276293402
-22.277023781019544
-24.402402749088747
-25.384059230134838
-21.493854249161604
-20.694732491502627
-21.59250678105503
-19.381287338956593
-21.218798956538112
-14.800212716306316
-7.9354523912413395
-8.101134581126702
-14.030090136930372
-19.297195651119253
-21.180770151806414
-23.31680605026658
-22.639547784601067
-19.18771952159576
-19.418581405301946
-21.367034972981788
-18.510321322006938
-18.222278628288336
-22.415673688219442
-19.581687315022645
-23.395332403966947
-22.29817573266537
-13.314773833341942
diff --git a/examples/src/main/resources/models/catboost/model_clf.cbm b/examples/src/main/resources/models/catboost/model_clf.cbm
deleted file mode 100644
index f915c27..0000000
Binary files a/examples/src/main/resources/models/catboost/model_clf.cbm and /dev/null differ
diff --git a/examples/src/main/resources/models/catboost/model_reg.cbm b/examples/src/main/resources/models/catboost/model_reg.cbm
deleted file mode 100644
index d311a52..0000000
Binary files a/examples/src/main/resources/models/catboost/model_reg.cbm and /dev/null differ
diff --git a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java
deleted file mode 100644
index 6b6dc53..0000000
--- a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.benchmarks.jmh.thin;
-
-import java.util.stream.IntStream;
-
-import org.apache.ignite.Ignite;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.client.ClientCache;
-import org.apache.ignite.client.IgniteClient;
-import org.apache.ignite.configuration.ClientConfiguration;
-import org.apache.ignite.configuration.IgniteConfiguration;
-import org.apache.ignite.internal.benchmarks.jmh.JmhAbstractBenchmark;
-import org.apache.ignite.internal.util.typedef.internal.A;
-import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
-import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
-
-/**
- * Base class for thin client benchmarks.
- */
-@State(Scope.Benchmark)
-public abstract class JmhThinClientAbstractBenchmark extends JmhAbstractBenchmark {
-    /** Property: nodes count. */
-    protected static final String PROP_DATA_NODES = "ignite.jmh.thin.dataNodes";
-
-    /** Default amount of nodes. */
-    protected static final int DFLT_DATA_NODES = 4;
-
-    /** Items count. */
-    protected static final int CNT = 1000;
-
-    /** Cache value. */
-    protected static final byte[] PAYLOAD = new byte[1000];
-
-    /** IP finder shared across nodes. */
-    private static final TcpDiscoveryVmIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);
-
-    /** Default cache name. */
-    private static final String DEFAULT_CACHE_NAME = "default";
-
-    /** Target node. */
-    protected Ignite node;
-
-    /** Target cache. */
-    protected ClientCache<Integer, byte[]> cache;
-
-    /** Thin client. */
-    protected IgniteClient client;
-
-    /**
-     * Setup routine. Child classes must invoke this method first.
-     *
-     */
-    @Setup
-    public void setup() {
-        System.out.println();
-        System.out.println("--------------------");
-        System.out.println("IGNITE BENCHMARK INFO: ");
-        System.out.println("\tdata nodes:                 " + intProperty(PROP_DATA_NODES, DFLT_DATA_NODES));
-        System.out.println("--------------------");
-        System.out.println();
-
-        int nodesCnt = intProperty(PROP_DATA_NODES, DFLT_DATA_NODES);
-
-        A.ensure(nodesCnt >= 1, "nodesCnt >= 1");
-
-        node = Ignition.start(configuration("node0"));
-
-        for (int i = 1; i < nodesCnt; i++)
-            Ignition.start(configuration("node" + i));
-
-        String[] addrs = IntStream
-                .range(10800, 10800 + nodesCnt)
-                .mapToObj(p -> "127.0.0.1:" + p)
-                .toArray(String[]::new);
-
-        ClientConfiguration cfg = new ClientConfiguration()
-                .setAddresses(addrs)
-                .setPartitionAwarenessEnabled(true);
-
-        client = Ignition.startClient(cfg);
-
-        cache = client.getOrCreateCache(DEFAULT_CACHE_NAME);
-
-        System.out.println("Loading test data...");
-
-        for (int i = 0; i < CNT; i++)
-            cache.put(i, PAYLOAD);
-
-        System.out.println("Test data loaded: " + CNT);
-    }
-
-    /**
-     * Tear down routine.
-     *
-     */
-    @TearDown
-    public void tearDown() throws Exception {
-        client.close();
-        Ignition.stopAll(true);
-    }
-
-    /**
-     * Create Ignite configuration.
-     *
-     * @param igniteInstanceName Ignite instance name.
-     * @return Configuration.
-     */
-    protected IgniteConfiguration configuration(String igniteInstanceName) {
-
-        return new IgniteConfiguration()
-                .setIgniteInstanceName(igniteInstanceName)
-                .setLocalHost("127.0.0.1")
-                .setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(IP_FINDER));
-    }
-}
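
For readers skimming the removed benchmark base class above: the following is a minimal, self-contained sketch of the same thin-client wiring its setup() performs, using only public Ignite APIs. It is an editorial illustration, not part of the reverted patch; the instance name, address, cache name and payload size are illustrative assumptions.

import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.client.ClientCache;
import org.apache.ignite.client.IgniteClient;
import org.apache.ignite.configuration.ClientConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;

public class ThinClientSetupSketch {
    public static void main(String[] args) {
        // Start a single embedded server node bound to the loopback interface.
        try (Ignite ignored = Ignition.start(new IgniteConfiguration()
                .setIgniteInstanceName("node0")
                .setLocalHost("127.0.0.1"))) {
            // Connect a partition-aware thin client, as the removed setup() does.
            ClientConfiguration cfg = new ClientConfiguration()
                .setAddresses("127.0.0.1:10800")
                .setPartitionAwarenessEnabled(true);

            try (IgniteClient client = Ignition.startClient(cfg)) {
                ClientCache<Integer, byte[]> cache = client.getOrCreateCache("default");

                // Preload a small fixed payload, mirroring the removed data-loading loop.
                byte[] payload = new byte[1000];
                for (int i = 0; i < 1000; i++)
                    cache.put(i, payload);

                System.out.println("Test data loaded: " + cache.size());
            }
        }
    }
}

Partition awareness lets the thin client route key-based operations directly to the node that owns the key, which is the code path the removed benchmark exercises.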
diff --git a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java
deleted file mode 100644
index 88e6a87..0000000
--- a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.benchmarks.jmh.thin;
-
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.ignite.internal.benchmarks.jmh.runner.JmhIdeBenchmarkRunner;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.Mode;
-
-/**
- * Thin client cache benchmark.
- *
- * Results on i7-9700K, Ubuntu 20.04.1, JDK 1.8.0_275:
- * Benchmark                         Mode  Cnt      Score      Error  Units
- * JmhThinClientCacheBenchmark.get  thrpt   10  92501.557 ± 1380.384  ops/s
- * JmhThinClientCacheBenchmark.put  thrpt   10  82907.446 ± 7572.537  ops/s
- *
- * JmhThinClientCacheBenchmark.get  avgt    10  41.505 ± 1.018        us/op
- * JmhThinClientCacheBenchmark.put  avgt    10  44.623 ± 0.779        us/op
- */
-public class JmhThinClientCacheBenchmark extends JmhThinClientAbstractBenchmark {
-    /**
-     * Cache put benchmark.
-     */
-    @Benchmark
-    public void put() {
-        int key = ThreadLocalRandom.current().nextInt(CNT);
-
-        cache.put(key, PAYLOAD);
-    }
-
-    /**
-     * Cache get benchmark.
-     */
-    @Benchmark
-    public Object get() {
-        int key = ThreadLocalRandom.current().nextInt(CNT);
-
-        return cache.get(key);
-    }
-
-    /**
-     * Run benchmarks.
-     *
-     * @param args Arguments.
-     * @throws Exception If failed.
-     */
-    public static void main(String[] args) throws Exception {
-        JmhIdeBenchmarkRunner runner = JmhIdeBenchmarkRunner.create()
-                .forks(1)
-                .threads(4)
-                .benchmarks(JmhThinClientCacheBenchmark.class.getSimpleName())
-                .jvmArguments("-Xms4g", "-Xmx4g");
-
-        runner
-                .benchmarkModes(Mode.Throughput)
-                .run();
-
-        runner
-                .benchmarkModes(Mode.AverageTime)
-                .outputTimeUnit(TimeUnit.MICROSECONDS)
-                .run();
-    }
-}
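
The removed benchmark above is driven through Ignite's internal JmhIdeBenchmarkRunner helper. As a hedged alternative sketch (editorial, not part of the patch), the same class could be launched with the stock JMH Runner/OptionsBuilder API, assuming the benchmark class is still on the classpath:

import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class ThinClientBenchmarkLauncherSketch {
    public static void main(String[] args) throws RunnerException {
        // Throughput pass: 1 fork, 4 threads and the same heap flags as the removed main().
        Options throughput = new OptionsBuilder()
            .include("JmhThinClientCacheBenchmark")
            .forks(1)
            .threads(4)
            .jvmArgs("-Xms4g", "-Xmx4g")
            .mode(Mode.Throughput)
            .build();

        new Runner(throughput).run();

        // Average-time pass reported in microseconds, matching the figures quoted in the Javadoc.
        Options avgTime = new OptionsBuilder()
            .include("JmhThinClientCacheBenchmark")
            .forks(1)
            .threads(4)
            .jvmArgs("-Xms4g", "-Xmx4g")
            .mode(Mode.AverageTime)
            .timeUnit(TimeUnit.MICROSECONDS)
            .build();

        new Runner(avgTime).run();
    }
}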
diff --git a/modules/clients/src/test/java/org/apache/ignite/common/ClientSideCacheCreationDestructionWileTopologyChangeTest.java b/modules/clients/src/test/java/org/apache/ignite/common/ClientSideCacheCreationDestructionWileTopologyChangeTest.java
deleted file mode 100644
index 3f0f622..0000000
--- a/modules/clients/src/test/java/org/apache/ignite/common/ClientSideCacheCreationDestructionWileTopologyChangeTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.common;
-
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.ignite.internal.IgniteInternalFuture;
-import org.apache.ignite.testframework.GridTestUtils;
-
-/**
- * {@inheritDoc} With topology changes running in parallel.
- */
-public class ClientSideCacheCreationDestructionWileTopologyChangeTest extends ClientSizeCacheCreationDestructionTest {
-    /** **/
-    private static final int MAX_NODES_CNT = 10;
-
-    /** **/
-    IgniteInternalFuture topChangeProcFut;
-
-    /** **/
-    AtomicBoolean procTopChanges = new AtomicBoolean(true);
-
-    /** {@inheritDoc} */
-    @Override protected void beforeTest() throws Exception {
-        super.beforeTest();
-
-        topChangeProcFut = asyncTopologyChanges();
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void afterTest() throws Exception {
-        procTopChanges.set(false);
-
-        topChangeProcFut.get();
-
-        super.afterTest();
-    }
-
-    /**
-     * @return {@code IgniteInternalFuture} to wait for topology process to stop in {@code afterTest()}.
-     */
-    private IgniteInternalFuture asyncTopologyChanges() {
-        return GridTestUtils.runAsync(() -> {
-            while (procTopChanges.get()) {
-                try {
-                    if (srv.cluster().nodes().size() < MAX_NODES_CNT)
-                        startGrid(UUID.randomUUID().toString());
-                }
-                catch (Exception e) {
-                    fail("Unable to add or remove node: " + e);
-                }
-            }
-        });
-    }
-}
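
The removed test above layers concurrent topology changes on top of the base scenarios by pairing an AtomicBoolean stop flag with an async future that is awaited in afterTest(). Below is a minimal plain-Java sketch of that stop-flag pattern using only JDK classes rather than Ignite's internal test framework; the loop body is a placeholder, not the real node-start logic.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;

public class StopFlagPatternSketch {
    public static void main(String[] args) throws Exception {
        AtomicBoolean keepRunning = new AtomicBoolean(true);
        ExecutorService exec = Executors.newSingleThreadExecutor();

        // Background worker: keeps going until the flag is cleared
        // (the real test starts extra grid nodes in this loop).
        Future<Integer> background = exec.submit(() -> {
            int iterations = 0;
            while (keepRunning.get()) {
                iterations++;
                Thread.sleep(10);
            }
            return iterations;
        });

        Thread.sleep(200); // the actual test body would run here

        keepRunning.set(false);   // afterTest(): ask the worker to stop...
        background.get();         // ...and wait for it to finish before tearing down

        exec.shutdown();
    }
}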
diff --git a/modules/clients/src/test/java/org/apache/ignite/common/ClientSizeCacheCreationDestructionTest.java b/modules/clients/src/test/java/org/apache/ignite/common/ClientSizeCacheCreationDestructionTest.java
deleted file mode 100644
index ab657de..0000000
--- a/modules/clients/src/test/java/org/apache/ignite/common/ClientSizeCacheCreationDestructionTest.java
+++ /dev/null
@@ -1,1243 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.common;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Serializable;
-import java.net.URL;
-import java.net.URLConnection;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Map;
-import java.util.concurrent.CountDownLatch;
-import javax.cache.CacheException;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.Ignition;
-import org.apache.ignite.cache.CacheAtomicityMode;
-import org.apache.ignite.cache.CacheMode;
-import org.apache.ignite.client.ClientCacheConfiguration;
-import org.apache.ignite.client.ClientException;
-import org.apache.ignite.client.IgniteClient;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.configuration.ClientConfiguration;
-import org.apache.ignite.configuration.ConnectorConfiguration;
-import org.apache.ignite.configuration.IgniteConfiguration;
-import org.apache.ignite.internal.IgniteInternalFuture;
-import org.apache.ignite.internal.jdbc.thin.JdbcThinConnection;
-import org.apache.ignite.testframework.GridTestUtils;
-import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
-import org.junit.Test;
-
-/**
- * Tests for cache creation and destruction from servers and clients: thin, thick, jdbc and rest.
- * Includes simultaneous operations, mainly within the same cache group.
- */
-@SuppressWarnings({"ThrowableNotThrown", "unchecked"})
-public class ClientSizeCacheCreationDestructionTest extends GridCommonAbstractTest {
-    /** **/
-    private static final String CACHE_NAME = "CacheName";
-
-    /** **/
-    private static final String ANOTHER_CACHE_NAME = "AnotherCacheName";
-
-    /** **/
-    private static final String CLIENT_CACHE_NAME = "ClientCacheName";
-
-    /** **/
-    private static final String CACHE_GROUP_NAME = "CacheGroupName";
-
-    /** **/
-    protected Ignite srv;
-
-    /** **/
-    private Ignite thickClient;
-
-    /** **/
-    private IgniteClient thinClient;
-
-    /** **/
-    private Connection jdbcConn;
-
-    /** {@inheritDoc} */
-    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
-        IgniteConfiguration configuration = super.getConfiguration(igniteInstanceName);
-
-        configuration.setConnectorConfiguration(new ConnectorConfiguration());
-
-        return configuration;
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void beforeTest() throws Exception {
-        super.beforeTest();
-
-        srv = startGrid("server");
-
-        thickClient = startClientGrid(1);
-
-        thinClient = Ignition.startClient(new ClientConfiguration().setAddresses("127.0.0.1:10800"));
-
-        jdbcConn = DriverManager.getConnection("jdbc:ignite:thin://127.0.0.1:10800");
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void afterTest() throws Exception {
-        super.afterTest();
-
-        if (thickClient != null)
-            thickClient.close();
-
-        if (thinClient != null)
-            thinClient.close();
-
-        if (jdbcConn != null)
-            jdbcConn.close();
-
-        stopAllGrids();
-    }
-
-    /**
-     * Direct scenario:
-     * <ol>
-     *     <li>Start server node, create cache in cache group.</li>
-     *     <li>Start client node and create cache in same cache group.</li>
-     *     <li>Assert no exception, cache successfully created, value may be inserted into this cache.</li>
-     * </ol>
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testServerThenClientCacheCreation() throws Exception {
-        createCache(srv, cacheConfig());
-
-        createCache(thickClient, cacheConfig().setName(CLIENT_CACHE_NAME));
-
-        IgniteCache cache = srv.cache(CLIENT_CACHE_NAME);
-
-        cache.put(1L, "abc");
-
-        assertEquals("abc", cache.get(1L));
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 4 different cache groups: each cache
-     *     in its own cache group.</li>
-     *     <li>Start <b>Thick</b> client node, create 1 new cache in each created cache group.</li>
-     *     <li>Assert that 4 cache groups exist with 2 caches each.</li>
-     *     <li>Try to insert and get some data from caches.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinFourCacheGroupsThickClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++)
-            createCache(thickClient, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CLIENT_CACHE_NAME + i));
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 4 different cache groups: each cache
-     *     in its own cache group.</li>
-     *     <li>Start <b>Thin</b> client node, create 1 new cache in each created cache group.</li>
-     *     <li>Assert that 4 cache groups exist with 2 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinFourCacheGroupsThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++) {
-            createCache(thinClient, clientCacheConfig().setGroupName(CACHE_GROUP_NAME + i).
-                setName(CLIENT_CACHE_NAME + i));
-        }
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 4 different cache groups: each cache
-     *     in its own cache group.</li>
-     *     <li>Start <b>Jdbc Thin</b> client node, create 1 new cache in each created cache group.</li>
-     *     <li>Assert that 4 cache groups exist with 2 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinFourCacheGroupsJdbcThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++)
-            createCache(jdbcConn, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CLIENT_CACHE_NAME + i));
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).
-                    getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i,
-                srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 4 different cache groups: each cache
-     *     in its own cache group.</li>
-     *     <li>Start <b>Rest</b> client node, create 1 new cache in each created cache group.</li>
-     *     <li>Assert that 4 cache groups exist with 2 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinFourCacheGroupsRestClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++)
-            createCacheWithRestClient(cacheConfig().setGroupName(CACHE_GROUP_NAME + i).setName(CLIENT_CACHE_NAME + i));
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + i,
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 2 different cache groups in server node (2+2).</li>
-     *     <li>Start <b>Thick</b> client node, create 2 new caches in each created cache group.</li>
-     *     <li>Assert that 2 cache groups exist with 4 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinTwoCacheGroupsThickClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++) {
-            createCache(thickClient, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).
-                setName(CLIENT_CACHE_NAME + i));
-        }
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 2 different cache groups in server node (2+2).</li>
-     *     <li>Start <b>Thin</b> client node, create 2 new caches in each created cache group.</li>
-     *     <li>Assert that 2 cache groups exist with 4 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinTwoCacheGroupsThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++) {
-            createCache(thinClient, clientCacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).
-                setName(CLIENT_CACHE_NAME + i));
-        }
-
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 2 different cache groups in server node (2+2).</li>
-     *     <li>Start <b>Jdbc Thin</b> client node, create 2 new caches in each created cache group.</li>
-     *     <li>Assert that 2 cache groups exist with 4 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinTwoCacheGroupsJdbcThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++) {
-            createCache(jdbcConn, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).
-                setName(CLIENT_CACHE_NAME + i));
-        }
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).
-                    getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i,
-                srv.cache("SQL_PUBLIC_" + CLIENT_CACHE_NAME.toUpperCase() + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches in 2 different cache groups in server node (2+2).</li>
-     *     <li>Start <b>Rest</b> client node, create 2 new caches in each created cache group.</li>
-     *     <li>Assert that 2 cache groups exist with 4 caches each.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithinTwoCacheGroupsRestClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).setName(CACHE_NAME + i));
-
-        for (int i = 0; i < 4; i++) {
-            createCacheWithRestClient(cacheConfig().setGroupName(CACHE_GROUP_NAME + (i % 2)).
-                setName(CLIENT_CACHE_NAME + i));
-        }
-        // Assertions.
-        assertEquals(8, srv.cacheNames().size());
-
-        for (int i = 0; i < 4; i++) {
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            assertEquals(CACHE_GROUP_NAME + (i % 2),
-                srv.cache(CLIENT_CACHE_NAME + i).getConfiguration(CacheConfiguration.class).getGroupName());
-
-            srv.cache(CACHE_NAME + i).put(1, "abc_srv" + i);
-            assertEquals("abc_srv" + i, srv.cache(CACHE_NAME + i).get(1));
-
-            srv.cache(CLIENT_CACHE_NAME + i).put(1, "abc_cli" + i);
-            assertEquals("abc_cli" + i, srv.cache(CLIENT_CACHE_NAME + i).get(1));
-        }
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches without cache groups.</li>
-     *     <li>Start <b>Thick</b> client node, try to create cache with
-     *     cache group with a name == first cache name.</li>
-     *     <li>{@code CacheException} expected with message:
-     *     'Failed to start cache. Cache group name conflict with existing cache (change group name)'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithCacheGroupNameEqualsFirstCacheNameThickClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfigWithoutCacheGroup().setName(CACHE_NAME + i));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(thickClient, cacheConfig().setGroupName(CACHE_NAME + 0).setName(CLIENT_CACHE_NAME));
-
-                return null;
-            },
-            CacheException.class,
-            "Failed to start cache. Cache group name conflict with existing cache (change group name)");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches without cache groups.</li>
-     *     <li>Start <b>Thin</b> client node, try to create cache with cache group with a name == first cache name.</li>
-     *     <li>{@code ClientException} expected with message:
-     *     'Failed to start cache. Cache group name conflict with existing cache (change group name)'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithCacheGroupNameEqualsFirstCacheNameThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfigWithoutCacheGroup().setName(CACHE_NAME + i));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(thinClient, clientCacheConfig().setGroupName(CACHE_NAME + 0).setName(CLIENT_CACHE_NAME));
-
-                return null;
-            },
-            ClientException.class,
-            "Failed to start cache. Cache group name conflict with existing cache (change group name)");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches without cache groups.</li>
-     *     <li>Start <b>Jdbc Thin</b> client node, try to create cache
-     *     with cache group with a name == first cache name.</li>
-     *     <li>{@code SQLException} expected with message:
-     *     'Failed to start cache. Cache group name conflict with existing cache (change group name)'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithCacheGroupNameEqualsFirstCacheNameJdbcThinClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfigWithoutCacheGroup().setName(CACHE_NAME + i));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(jdbcConn, cacheConfig().setGroupName(CACHE_NAME + 0).setName(CLIENT_CACHE_NAME));
-
-                return null;
-            },
-            SQLException.class,
-            "Failed to start cache. Cache group name conflict with existing cache (change group name)");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches without cache groups.</li>
-     *     <li>Start <b>Rest</b> client node, try to create cache with cache group with a name == first cache name.</li>
-     *     <li>{@code Exception} expected.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithCacheGroupNameEqualsFirstCacheNameRestClient() throws Exception {
-        for (int i = 0; i < 4; i++)
-            createCache(srv, cacheConfigWithoutCacheGroup().setName(CACHE_NAME + i));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCacheWithRestClient(cacheConfig().setGroupName(CACHE_NAME + 0).setName(CLIENT_CACHE_NAME));
-                return null;
-            },
-            AssertionError.class,
-            "expected:<0> but was:<1>");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches with cache groups.</li>
-     *     <li>Start <b>Thick</b> client node, try to create extra cache within same cache group but with different
-     *     config.</li>
-     *     <li>{@code CacheException} expected
-     *     with message 'Backups mismatch for caches related to the same group'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithDifferentConfigThickClient() throws Exception {
-        createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME).setBackups(1));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(thickClient, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CLIENT_CACHE_NAME).
-                    setBackups(2));
-
-                return null;
-            },
-            CacheException.class,
-            "Backups mismatch for caches related to the same group");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches with cache groups.</li>
-     *     <li>Start <b>Thin</b> client node, try to create extra cache within same cache group but with different
-     *     config.</li>
-     *     <li>{@code ClientException} expected
-     *     with message 'Backups mismatch for caches related to the same group'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithDifferentConfigThinClient() throws Exception {
-        createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME).setBackups(1));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(thinClient, clientCacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CLIENT_CACHE_NAME).
-                    setBackups(2));
-
-                return null;
-            },
-            ClientException.class,
-            "Backups mismatch for caches related to the same group");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches with cache groups.</li>
-     *     <li>Start <b>Jdbc Thin</b> client node, try to create extra cache within same cache group but with different
-     *     config.</li>
-     *     <li>{@code SQLException} expected with message 'Backups mismatch for caches related to the same group'.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithDifferentConfigJdbcThinClient() throws Exception {
-        createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME).setBackups(1));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCache(jdbcConn, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CLIENT_CACHE_NAME).
-                    setBackups(2));
-
-                return null;
-            },
-            SQLException.class,
-            "Backups mismatch for caches related to the same group");
-    }
-
-    /**
-     * Few caches created in chain:
-     * <ol>
-     *     <li>Start server node, create 4 different caches with cache groups.</li>
-     *     <li>Start <b>Rest</b> client node, try to create extra cache within same cache group but with different
-     *     config.</li>
-     *     <li>Exception is expected.</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testFewCachesCreatedInChainWithDifferentConfigRestClient() throws Exception {
-        createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME).setBackups(1));
-
-        GridTestUtils.assertThrows(
-            null,
-            () -> {
-                createCacheWithRestClient(cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CLIENT_CACHE_NAME).
-                    setBackups(2));
-
-                return null;
-            },
-            AssertionError.class,
-            "expected:<0> but was:<1>");
-    }
-
-    /**
-     * Destroy caches:
-     * <ol>
-     *     <li>Start server node, create 2 caches in single cache group.</li>
-     *     <li>Start <b>Thick</b> client and try to destroy 2 caches at the same time from client and from server.</li>
-     *     <li>Assert that the operation completed successfully, both caches are destroyed and the cache group no longer
-     *     exists (for example, create a cache with the same name as the deleted cache group).</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDestroyCachesThickClient() throws Exception {
-        for (int i = 0; i < 2; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME + i));
-
-        CountDownLatch latch = new CountDownLatch(1);
-
-        IgniteInternalFuture srv = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-            this.srv.destroyCache(CACHE_NAME + 0);
-        });
-
-        IgniteInternalFuture client = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-            thickClient.destroyCache(CACHE_NAME + 1);
-        });
-
-        latch.countDown();
-
-        srv.get();
-
-        client.get();
-
-        assertEquals(0, this.srv.cacheNames().size());
-
-        this.srv.createCache(CACHE_GROUP_NAME);
-    }
-
-    /**
-     * Destroy caches:
-     * <ol>
-     *     <li>Start server node, create 2 caches in single cache group.</li>
-     *     <li>Start <b>Thin</b> client and try to destroy 2 caches at the same time from client and from server.</li>
-     *     <li>Assert that the operation completed successfully, both caches are destroyed and the cache group no longer
-     *     exists (for example, create a cache with the same name as the deleted cache group).</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDestroyCachesThinClient() throws Exception {
-        for (int i = 0; i < 2; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME + i));
-
-        CountDownLatch latch = new CountDownLatch(1);
-
-        IgniteInternalFuture srv = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-            this.srv.destroyCache(CACHE_NAME + 0);
-        });
-
-        IgniteInternalFuture client = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-            thinClient.destroyCache(CACHE_NAME + 1);
-        });
-
-        latch.countDown();
-
-        srv.get();
-
-        client.get();
-
-        assertEquals(0, this.srv.cacheNames().size());
-
-        this.srv.createCache(CACHE_GROUP_NAME);
-    }
-
-    /**
-     * Destroy caches:
-     * <ol>
-     *     <li>Start server node, create 2 caches in single cache group.</li>
-     *     <li>Start <b>Rest</b> client and try to destroy 2 caches at the same time from client and from server.</li>
-     *     <li>Assert that the operation completed successfully, both caches are destroyed and the cache group no longer
-     *     exists (for example, create a cache with the same name as the deleted cache group).</li>
-     * </ol>
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDestroyCachesRestClient() throws Exception {
-        for (int i = 0; i < 2; i++)
-            createCache(srv, cacheConfig().setGroupName(CACHE_GROUP_NAME).setName(CACHE_NAME + i));
-
-        CountDownLatch latch = new CountDownLatch(1);
-
-        IgniteInternalFuture srv = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-            this.srv.destroyCache(CACHE_NAME + 0);
-        });
-
-        IgniteInternalFuture client = GridTestUtils.runAsync(() -> {
-            try {
-                latch.await();
-            }
-            catch (InterruptedException e) {
-                fail(e.toString());
-            }
-
-            URLConnection conn = null;
-            try {
-                conn = new URL("http://localhost:8080/ignite?cmd=destcache&cacheName=" + CACHE_NAME + "1").
-                    openConnection();
-            }
-            catch (IOException e) {
-                fail(e.toString());
-            }
-
-            try {
-                conn.connect();
-
-                try (InputStreamReader streamReader = new InputStreamReader(conn.getInputStream())) {
-                    ObjectMapper objMapper = new ObjectMapper();
-                    Map<String, Object> myMap = objMapper.readValue(streamReader,
-                        new TypeReference<Map<String, Object>>() {
-                        });
-
-                    log.info("Version command response is: " + myMap);
-
-                    assertTrue(myMap.containsKey("response"));
-                    assertEquals(0, myMap.get("successStatus"));
-                }
-            }
-            catch (IOException e) {
-                fail(e.toString());
-            }
-
-        });
-
-        latch.countDown();
-
-        srv.get();
-
-        client.get();
-
-        assertEquals(0, this.srv.cacheNames().size());
-
-        this.srv.createCache(CACHE_GROUP_NAME);
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thick</b> client.</li>
-     *         <li>Create new cache with an existing cache group on server side.</li>
-     *         <li>Destroy newly created cache through client.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b>
-     *      Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnSrvDestroyOnThickClient() {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        srv.createCache(cacheConfig());
-
-        thickClient.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on server side.</li>
-     *         <li>Destroy newly created cache through client.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b>
-     *      Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnSrvDestroyOnThinClient() {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        srv.createCache(cacheConfig());
-
-        thinClient.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Rest</b> client.</li>
-     *         <li>Create new cache with an existing cache group on server side.</li>
-     *         <li>Destroy newly created cache through client.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b>
-     *      Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnSrvDestroyOnRestClient() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        srv.createCache(cacheConfig());
-
-        destroyCacheWithRestClient(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thick</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through server node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b>
-     *      Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnThickClientDestroyOnSrv() {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        thickClient.createCache(cacheConfig());
-
-        srv.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through server node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b>
-     *      Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnThinClientSrvDestroyOnSrv() {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        thinClient.createCache(clientCacheConfig());
-
-        srv.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Jdbc Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through server node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnJdbcClientDestroyOnSrv() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        createCache(jdbcConn, cacheConfig());
-
-        srv.destroyCache("SQL_PUBLIC_" + CACHE_NAME.toUpperCase());
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Rest</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through server node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnRestClientDestroyOnSrv() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        createCacheWithRestClient(cacheConfig());
-
-        srv.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thick</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through some other, previously created, <b>Thin</b> client node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnThickClientDestroyThinClient() {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        thickClient.createCache(cacheConfig());
-
-        thinClient.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through some other, previously created, <b>Rest</b> client node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnThinClientSrvDestroyOnRestClient() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        thinClient.createCache(clientCacheConfig());
-
-        destroyCacheWithRestClient(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Jdbc Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through some other, previously created, <b>Thin</b> client node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     */
-    @Test
-    public void testCreateOnJdbcClientDestroyOnThinClient() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        createCache(jdbcConn, cacheConfig());
-
-        thinClient.destroyCache("SQL_PUBLIC_" + CACHE_NAME.toUpperCase());
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create and destroy caches:
-     * <p>
-     *     <b>Prerequisites:</b>
-     *     Start server node, create 1 cache in a single cache group.
-     * <p>
-     *     <b>Steps:</b>
-     *     <ol>
-     *         <li>Start <b>Jdbc Thin</b> client.</li>
-     *         <li>Create new cache with an existing cache group on client side.</li>
-     *         <li>Destroy newly created cache through some other, previously created, <b>Thick</b> client node.</li>
-     *     </ol>
-     * <p>
-     *      <b>Expected:</b> Only one cache, initially created within the server node, is expected.
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testCreateOnRestClientDestroyOnThickClient() throws Exception {
-        srv.createCache(cacheConfig().setName(ANOTHER_CACHE_NAME));
-
-        createCacheWithRestClient(cacheConfig());
-
-        thickClient.destroyCache(CACHE_NAME);
-
-        assertEquals(1, srv.cacheNames().size());
-
-        assertEquals(ANOTHER_CACHE_NAME, srv.cacheNames().iterator().next());
-    }
-
-    /**
-     * Create cache with the specified configuration through a thin client, thick client or JDBC thin connection.
-     *
-     * @param node Ignite node, thin client instance or JDBC connection.
-     * @param cacheCfg Cache or ClientCache configuration.
-     * @throws SQLException If failed to create cache through Jdbc Thin connection.
-     */
-    private void createCache(AutoCloseable node, Serializable cacheCfg) throws SQLException {
-        if (node instanceof IgniteClient)
-            ((IgniteClient)node).createCache((ClientCacheConfiguration)cacheCfg);
-        else if (node instanceof Ignite)
-            ((Ignite)node).createCache((CacheConfiguration)cacheCfg);
-        else if (node instanceof JdbcThinConnection) {
-            CacheConfiguration jdbcCacheCfg = (CacheConfiguration)cacheCfg;
-
-            srv.addCacheConfiguration(jdbcCacheCfg);
-
-            try (Statement stmt = jdbcConn.createStatement()) {
-                stmt.execute("CREATE TABLE " + jdbcCacheCfg.getName() +
-                    " (id int, name varchar, primary key (id)) WITH \"template=" + jdbcCacheCfg.getName() + "\"");
-            }
-        }
-        else
-            fail(" Unexpected node/client type");
-    }
-
-    /**
-     * Create cache with the specified configuration through the REST client.
-     * @param cacheCfg Cache configuration.
-     * @throws Exception If failed.
-     */
-    private void createCacheWithRestClient(CacheConfiguration cacheCfg) throws Exception {
-        srv.addCacheConfiguration(cacheCfg);
-
-        URLConnection conn = new URL("http://localhost:8080/ignite?cmd=getorcreate&cacheName=" +
-            cacheCfg.getName() + "&templateName=" + cacheCfg.getName()).openConnection();
-
-        conn.connect();
-
-        try (InputStreamReader streamReader = new InputStreamReader(conn.getInputStream())) {
-            ObjectMapper objMapper = new ObjectMapper();
-            Map<String, Object> myMap = objMapper.readValue(streamReader,
-                new TypeReference<Map<String, Object>>() {
-                });
-
-            log.info("Version command response is: " + myMap);
-
-            assertTrue(myMap.containsKey("response"));
-            assertEquals(0, myMap.get("successStatus"));
-        }
-    }
-
-    /**
-     * Destroy cache through the REST client.
-     * @param cacheName Cache name.
-     * @throws Exception If failed.
-     */
-    private void destroyCacheWithRestClient(String cacheName) throws Exception {
-        URLConnection conn = new URL("http://localhost:8080/ignite?cmd=destcache&cacheName=" + cacheName).
-            openConnection();
-
-        conn.connect();
-
-        try (InputStreamReader streamReader = new InputStreamReader(conn.getInputStream())) {
-            ObjectMapper objMapper = new ObjectMapper();
-            Map<String, Object> myMap = objMapper.readValue(streamReader,
-                new TypeReference<Map<String, Object>>() {
-                });
-
-            log.info("Version command response is: " + myMap);
-
-            assertTrue(myMap.containsKey("response"));
-            assertEquals(0, myMap.get("successStatus"));
-        }
-    }
-
-    /**
-     * @return Default client cache configuration.
-     */
-    private ClientCacheConfiguration clientCacheConfig() {
-        return new ClientCacheConfiguration().
-            setGroupName(CACHE_GROUP_NAME).
-            setName(CACHE_NAME).
-            setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL).
-            setCacheMode(CacheMode.PARTITIONED);
-    }
-
-    /**
-     * @return Default cache configuration.
-     */
-    private CacheConfiguration cacheConfig() {
-        return new CacheConfiguration().
-            setGroupName(CACHE_GROUP_NAME).
-            setName(CACHE_NAME).
-            setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL).
-            setCacheMode(CacheMode.PARTITIONED);
-    }
-
-    /**
-     * @return Default cache configuration without cache group.
-     */
-    private CacheConfiguration cacheConfigWithoutCacheGroup() {
-        return new CacheConfiguration().
-            setName(CACHE_NAME).
-            setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL).
-            setCacheMode(CacheMode.PARTITIONED);
-    }
-}
diff --git a/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java b/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java
index 0058bac..7408f4e 100644
--- a/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java
+++ b/modules/clients/src/test/java/org/apache/ignite/internal/client/suite/IgniteClientTestSuite.java
@@ -17,8 +17,6 @@
 
 package org.apache.ignite.internal.client.suite;
 
-import org.apache.ignite.common.ClientSideCacheCreationDestructionWileTopologyChangeTest;
-import org.apache.ignite.common.ClientSizeCacheCreationDestructionTest;
 import org.apache.ignite.internal.IgniteClientFailuresTest;
 import org.apache.ignite.internal.TaskEventSubjectIdSelfTest;
 import org.apache.ignite.internal.client.ClientDefaultCacheSelfTest;
@@ -177,10 +175,7 @@ import org.junit.runners.Suite;
     // SSL params.
     ClientSslParametersTest.class,
 
-    IgniteClientFailuresTest.class,
-
-    ClientSizeCacheCreationDestructionTest.class,
-    ClientSideCacheCreationDestructionWileTopologyChangeTest.class
+    IgniteClientFailuresTest.class
 })
 public class IgniteClientTestSuite {
 }
diff --git a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/PersistenceBasicCompatibilityTest.java b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/PersistenceBasicCompatibilityTest.java
index 3c4c163..2a283e5 100644
--- a/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/PersistenceBasicCompatibilityTest.java
+++ b/modules/compatibility/src/test/java/org/apache/ignite/compatibility/persistence/PersistenceBasicCompatibilityTest.java
@@ -78,8 +78,8 @@ public class PersistenceBasicCompatibilityTest extends IgnitePersistenceCompatib
      * @throws Exception If failed.
      */
     @Test
-    public void testNodeStartByOldVersionPersistenceData_2_1() throws Exception {
-        doTestStartupWithOldVersion("2.1.0");
+    public void testNodeStartByOldVersionPersistenceData_2_2() throws Exception {
+        doTestStartupWithOldVersion("2.2.0");
     }
 
     /**
@@ -88,8 +88,8 @@ public class PersistenceBasicCompatibilityTest extends IgnitePersistenceCompatib
      * @throws Exception If failed.
      */
     @Test
-    public void testNodeStartByOldVersionPersistenceData_2_2() throws Exception {
-        doTestStartupWithOldVersion("2.2.0");
+    public void testNodeStartByOldVersionPersistenceData_2_1() throws Exception {
+        doTestStartupWithOldVersion("2.1.0");
     }
 
     /**
@@ -135,56 +135,6 @@ public class PersistenceBasicCompatibilityTest extends IgnitePersistenceCompatib
     /**
      * Tests the ability to read data from a previous Ignite DB version.
      *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testNodeStartByOldVersionPersistenceData_2_7() throws Exception {
-        doTestStartupWithOldVersion("2.7.0");
-    }
-
-    /**
-     * Tests the ability to read data from a previous Ignite DB version.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testNodeStartByOldVersionPersistenceData_2_7_6() throws Exception {
-        doTestStartupWithOldVersion("2.7.6");
-    }
-
-    /**
-     * Tests the ability to read data from a previous Ignite DB version.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testNodeStartByOldVersionPersistenceData_2_8() throws Exception {
-        doTestStartupWithOldVersion("2.8.0");
-    }
-
-    /**
-     * Tests the ability to read data from a previous Ignite DB version.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testNodeStartByOldVersionPersistenceData_2_8_1() throws Exception {
-        doTestStartupWithOldVersion("2.8.1");
-    }
-
-    /**
-     * Tests the ability to read data from a previous Ignite DB version.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testNodeStartByOldVersionPersistenceData_2_9() throws Exception {
-        doTestStartupWithOldVersion("2.9.0");
-    }
-
-    /**
-     * Tests the ability to read data from a previous Ignite DB version.
-     *
      * @param igniteVer 3-digits version of ignite
      * @throws Exception If failed.
      */
diff --git a/modules/compress/src/test/java/org/apache/ignite/internal/processors/compress/WalPageCompressionIntegrationTest.java b/modules/compress/src/test/java/org/apache/ignite/internal/processors/compress/WalPageCompressionIntegrationTest.java
index b589eaf..eb7e118 100644
--- a/modules/compress/src/test/java/org/apache/ignite/internal/processors/compress/WalPageCompressionIntegrationTest.java
+++ b/modules/compress/src/test/java/org/apache/ignite/internal/processors/compress/WalPageCompressionIntegrationTest.java
@@ -18,7 +18,6 @@
 package org.apache.ignite.internal.processors.compress;
 
 import org.apache.ignite.IgniteCache;
-import org.apache.ignite.cluster.ClusterState;
 import org.apache.ignite.configuration.CacheConfiguration;
 import org.apache.ignite.configuration.DataRegionConfiguration;
 import org.apache.ignite.configuration.DataStorageConfiguration;
@@ -29,7 +28,6 @@ import org.apache.ignite.internal.pagemem.wal.record.CheckpointRecord;
 import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer;
 import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
 import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
-import org.junit.Test;
 
 import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
 
@@ -99,20 +97,4 @@ public class WalPageCompressionIntegrationTest extends AbstractPageCompressionIn
         assertTrue("Compressed WAL must be smaller than uncompressed [ptr0=" + ptr0 + ", ptr1=" + ptr1 + ']',
             ptr0.compareTo(ptr1) < 0);
     }
-
-    /** */
-    @Test
-    public void testSkipGarbageApplyPageSnapshotWrongAssertion() throws Exception {
-        compression = DiskPageCompression.SKIP_GARBAGE;
-
-        IgniteEx ignite = startGrid(0);
-
-        ignite.cluster().state(ClusterState.ACTIVE);
-
-        ignite.getOrCreateCache(DEFAULT_CACHE_NAME);
-
-        stopGrid(0, true);
-
-        startGrid(0);
-    }
 }
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/CommandList.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/CommandList.java
index 7c45a36..e16acaa 100644
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/CommandList.java
+++ b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/CommandList.java
@@ -19,7 +19,7 @@ package org.apache.ignite.internal.commandline;
 
 import org.apache.ignite.internal.commandline.cache.CacheCommands;
 import org.apache.ignite.internal.commandline.diagnostic.DiagnosticCommand;
-import org.apache.ignite.internal.commandline.encryption.EncryptionCommands;
+import org.apache.ignite.internal.commandline.encryption.EncryptionCommand;
 import org.apache.ignite.internal.commandline.meta.MetadataCommand;
 import org.apache.ignite.internal.commandline.metric.MetricCommand;
 import org.apache.ignite.internal.commandline.property.PropertyCommand;
@@ -59,7 +59,7 @@ public enum CommandList {
     DIAGNOSTIC("--diagnostic", new DiagnosticCommand()),
 
     /** Encryption features command. */
-    ENCRYPTION("--encryption", new EncryptionCommands()),
+    ENCRYPTION("--encryption", new EncryptionCommand()),
 
     /** Kill command. */
     KILL("--kill", new KillCommand()),
@@ -92,10 +92,7 @@ public enum CommandList {
     METRIC("--metric", new MetricCommand()),
 
     /** */
-    PERSISTENCE("--persistence", new PersistenceCommand()),
-
-    /** Command to manage PDS defragmentation. */
-    DEFRAGMENTATION("--defragmentation", new DefragmentationCommand());
+    PERSISTENCE("--persistence", new PersistenceCommand());
 
     /** Private values copy so there's no need in cloning it every time. */
     private static final CommandList[] VALUES = CommandList.values();
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/DefragmentationCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/DefragmentationCommand.java
deleted file mode 100644
index e421863..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/DefragmentationCommand.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Optional;
-import java.util.Set;
-import java.util.logging.Logger;
-import java.util.stream.Collectors;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.client.GridClientNode;
-import org.apache.ignite.internal.commandline.defragmentation.DefragmentationArguments;
-import org.apache.ignite.internal.commandline.defragmentation.DefragmentationSubcommands;
-import org.apache.ignite.internal.visor.VisorTaskArgument;
-import org.apache.ignite.internal.visor.defragmentation.VisorDefragmentationOperation;
-import org.apache.ignite.internal.visor.defragmentation.VisorDefragmentationTask;
-import org.apache.ignite.internal.visor.defragmentation.VisorDefragmentationTaskArg;
-import org.apache.ignite.internal.visor.defragmentation.VisorDefragmentationTaskResult;
-
-import static org.apache.ignite.internal.commandline.Command.usage;
-import static org.apache.ignite.internal.commandline.CommandList.DEFRAGMENTATION;
-import static org.apache.ignite.internal.commandline.defragmentation.DefragmentationSubcommands.CANCEL;
-import static org.apache.ignite.internal.commandline.defragmentation.DefragmentationSubcommands.SCHEDULE;
-
-/** */
-public class DefragmentationCommand implements Command<DefragmentationArguments> {
-    /** */
-    private static final String NODES_ARG = "--nodes";
-
-    /** */
-    private static final String CACHES_ARG = "--caches";
-
-    /** */
-    private DefragmentationArguments args;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            Optional<GridClientNode> firstNodeOpt = client.compute().nodes().stream().filter(GridClientNode::connectable).findFirst();
-
-            if (firstNodeOpt.isPresent()) {
-                VisorDefragmentationTaskResult res;
-
-                if (args.nodeIds() == null) {
-                    res = TaskExecutor.executeTaskByNameOnNode(
-                        client,
-                        VisorDefragmentationTask.class.getName(),
-                        convertArguments(),
-                        null, // Use node from clientCfg.
-                        clientCfg
-                    );
-                }
-                else {
-                    VisorTaskArgument<?> visorArg = new VisorTaskArgument<>(
-                        client.compute().nodes().stream().filter(
-                            node -> args.nodeIds().contains(node.consistentId().toString())
-                        ).map(GridClientNode::nodeId).collect(Collectors.toList()),
-                        convertArguments(),
-                        false
-                    );
-
-                    res = client.compute()
-                        .projection(firstNodeOpt.get())
-                        .execute(
-                            VisorDefragmentationTask.class.getName(),
-                            visorArg
-                        );
-                }
-
-                printResult(res, log);
-            }
-            else
-                log.warning("No nodes found in topology, command won't be executed.");
-        }
-        catch (Throwable t) {
-            log.severe("Failed to execute defragmentation command='" + args.subcommand().text() + "'");
-            log.severe(CommandLogger.errorMessage(t));
-
-            throw t;
-        }
-
-        return null;
-    }
-
-    /** */
-    private void printResult(VisorDefragmentationTaskResult res, Logger log) {
-        assert res != null;
-
-        log.info(res.getMessage());
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        DefragmentationSubcommands cmd = DefragmentationSubcommands.of(argIter.nextArg("Expected defragmentation subcommand."));
-
-        if (cmd == null)
-            throw new IllegalArgumentException("Expected correct defragmentation subcommand.");
-
-        args = new DefragmentationArguments(cmd);
-
-        switch (cmd) {
-            case SCHEDULE:
-                List<String> consistentIds = null;
-                List<String> cacheNames = null;
-
-                String subarg;
-
-                do {
-                    subarg = argIter.peekNextArg();
-
-                    if (subarg == null)
-                        break;
-
-                    subarg = subarg.toLowerCase(Locale.ENGLISH);
-
-                    switch (subarg) {
-                        case NODES_ARG: {
-                            argIter.nextArg("");
-
-                            Set<String> ids = argIter.nextStringSet(NODES_ARG);
-
-                            if (ids.isEmpty())
-                                throw new IllegalArgumentException("Consistent ids list is empty.");
-
-                            consistentIds = new ArrayList<>(ids);
-
-                            break;
-                        }
-
-                        case CACHES_ARG: {
-                            argIter.nextArg("");
-
-                            Set<String> ids = argIter.nextStringSet(CACHES_ARG);
-
-                            if (ids.isEmpty())
-                                throw new IllegalArgumentException("Caches list is empty.");
-
-                            cacheNames = new ArrayList<>(ids);
-
-                            break;
-                        }
-
-                        default:
-                            subarg = null;
-                    }
-                }
-                while (subarg != null);
-
-                if (consistentIds == null)
-                    throw new IllegalArgumentException("--nodes argument is missing.");
-
-                args.setNodeIds(consistentIds);
-                args.setCacheNames(cacheNames);
-
-                break;
-
-            case STATUS:
-            case CANCEL:
-                // No-op.
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public DefragmentationArguments arg() {
-        return args;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger log) {
-        String consistentIds = "consistentId0,consistentId1";
-
-        String cacheNames = "cache1,cache2,cache3";
-
-        usage(
-            log,
-            "Schedule PDS defragmentation on given nodes for all caches:",
-            DEFRAGMENTATION,
-            SCHEDULE.text(),
-            NODES_ARG,
-            consistentIds
-        );
-
-        usage(
-            log,
-            "Schedule PDS defragmentation on given nodes but only for given caches:",
-            DEFRAGMENTATION,
-            SCHEDULE.text(),
-            NODES_ARG,
-            consistentIds,
-            CACHES_ARG,
-            cacheNames
-        );
-
-        usage(
-            log,
-            "Cancel scheduled or active PDS defragmentation on underlying node:",
-            DEFRAGMENTATION,
-            CANCEL.text()
-        );
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return DEFRAGMENTATION.toCommandName();
-    }
-
-    /** */
-    private VisorDefragmentationTaskArg convertArguments() {
-        return new VisorDefragmentationTaskArg(
-            convertSubcommand(args.subcommand()),
-            args.cacheNames()
-        );
-    }
-
-    /** */
-    private static VisorDefragmentationOperation convertSubcommand(DefragmentationSubcommands subcmd) {
-        switch (subcmd) {
-            case SCHEDULE:
-                return VisorDefragmentationOperation.SCHEDULE;
-
-            case STATUS:
-                return VisorDefragmentationOperation.STATUS;
-
-            case CANCEL:
-                return VisorDefragmentationOperation.CANCEL;
-
-            default:
-                throw new IllegalArgumentException(subcmd.name());
-        }
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationArguments.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationArguments.java
deleted file mode 100644
index e82e578..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationArguments.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.defragmentation;
-
-import java.util.List;
-
-/** */
-@SuppressWarnings("AssignmentOrReturnOfFieldWithMutableType")
-public class DefragmentationArguments {
-    /** */
-    private final DefragmentationSubcommands subcmd;
-
-    /** */
-    private List<String> nodeIds;
-
-    /** */
-    private List<String> cacheNames;
-
-    /** */
-    public DefragmentationArguments(DefragmentationSubcommands subcmd) {
-        this.subcmd = subcmd;
-    }
-
-    /** */
-    public DefragmentationSubcommands subcommand() {
-        return subcmd;
-    }
-
-    /** */
-    public void setNodeIds(List<String> nodeIds) {
-        this.nodeIds = nodeIds;
-    }
-
-    /** */
-    public List<String> nodeIds() {
-        return nodeIds;
-    }
-
-    /** */
-    public void setCacheNames(List<String> cacheNames) {
-        this.cacheNames = cacheNames;
-    }
-
-    /** */
-    public List<String> cacheNames() {
-        return cacheNames;
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationSubcommands.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationSubcommands.java
deleted file mode 100644
index 86ec775..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/defragmentation/DefragmentationSubcommands.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.defragmentation;
-
-import org.apache.ignite.internal.visor.defragmentation.VisorDefragmentationOperation;
-import org.jetbrains.annotations.Nullable;
-
-/** */
-public enum DefragmentationSubcommands {
-    /** */
-    SCHEDULE("schedule", VisorDefragmentationOperation.SCHEDULE),
-
-    /** */
-    STATUS("status", VisorDefragmentationOperation.STATUS),
-
-    /** */
-    CANCEL("cancel", VisorDefragmentationOperation.CANCEL);
-
-    /** */
-    private final String name;
-
-    /** */
-    private final VisorDefragmentationOperation visorOperation;
-
-    /** */
-    DefragmentationSubcommands(String name, VisorDefragmentationOperation visorOperation) {
-        this.name = name;
-        this.visorOperation = visorOperation;
-    }
-
-    /**
-     * @param strRep String representation of subcommand.
-     * @return Subcommand for its string representation.
-     */
-    public static @Nullable DefragmentationSubcommands of(String strRep) {
-        for (DefragmentationSubcommands cmd : values()) {
-            if (cmd.text().equalsIgnoreCase(strRep))
-                return cmd;
-        }
-
-        return null;
-    }
-
-    /** */
-    public String text() {
-        return name;
-    }
-
-    /** */
-    public VisorDefragmentationOperation operation() {
-        return visorOperation;
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/ConnectivityCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/ConnectivityCommand.java
deleted file mode 100644
index 99ca0cc..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/ConnectivityCommand.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.diagnostic;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.UUID;
-import java.util.logging.Logger;
-import java.util.stream.Collectors;
-import org.apache.ignite.cluster.ClusterNode;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.client.GridClientNode;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.TaskExecutor;
-import org.apache.ignite.internal.visor.diagnostic.availability.VisorConnectivityArgs;
-import org.apache.ignite.internal.visor.diagnostic.availability.VisorConnectivityResult;
-import org.apache.ignite.internal.visor.diagnostic.availability.VisorConnectivityTask;
-
-import static org.apache.ignite.internal.commandline.CommandHandler.UTILITY_NAME;
-import static org.apache.ignite.internal.commandline.CommandList.DIAGNOSTIC;
-import static org.apache.ignite.internal.commandline.CommandLogger.join;
-import static org.apache.ignite.internal.commandline.diagnostic.DiagnosticSubCommand.CONNECTIVITY;
-
-/**
- * Command to check connectivity between all nodes.
- */
-public class ConnectivityCommand implements Command<Void> {
-    /**
-     * Header of output table.
-     */
-    private final List<String> TABLE_HEADER = Arrays.asList(
-            "SOURCE-NODE-ID",
-            "SOURCE-CONSISTENT-ID",
-            "SOURCE-NODE-TYPE",
-            "DESTINATION-NODE-ID",
-            "DESTINATION_CONSISTENT_ID",
-            "DESTINATION-NODE-TYPE"
-    );
-
-    /**
-     * Client node type string.
-     */
-    private final String NODE_TYPE_CLIENT = "CLIENT";
-
-    /**
-     * Server node type string.
-     */
-    private final String NODE_TYPE_SERVER = "SERVER";
-
-    /**
-     * Logger
-     */
-    private Logger logger;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger logger) throws Exception {
-        this.logger = logger;
-
-        Map<ClusterNode, VisorConnectivityResult> result;
-
-        try (GridClient client = Command.startClient(clientCfg)) {
-            Set<UUID> nodeIds = client.compute().nodes().stream().map(GridClientNode::nodeId).collect(Collectors.toSet());
-
-            VisorConnectivityArgs taskArg = new VisorConnectivityArgs(nodeIds);
-
-            result = TaskExecutor.executeTask(
-                client,
-                VisorConnectivityTask.class,
-                taskArg,
-                clientCfg
-            );
-        }
-
-        printResult(result);
-
-        return result;
-    }
-
-    /**
-     * @param res Result.
-     */
-    private void printResult(Map<ClusterNode, VisorConnectivityResult> res) {
-        final boolean[] hasFailed = {false};
-
-        final List<List<String>> table = new ArrayList<>();
-
-        table.add(TABLE_HEADER);
-
-        for (Map.Entry<ClusterNode, VisorConnectivityResult> entry : res.entrySet()) {
-            ClusterNode key = entry.getKey();
-
-            String id = key.id().toString();
-            String consId = key.consistentId().toString();
-            String isClient = key.isClient() ? NODE_TYPE_CLIENT : NODE_TYPE_SERVER;
-
-            VisorConnectivityResult value = entry.getValue();
-
-            Map<ClusterNode, Boolean> statuses = value.getNodeIds();
-
-            List<List<String>> row = statuses.entrySet().stream().map(nodeStat -> {
-                ClusterNode remoteNode = nodeStat.getKey();
-
-                String remoteId = remoteNode.id().toString();
-                String remoteConsId = remoteNode.consistentId().toString();
-                String nodeType = remoteNode.isClient() ? NODE_TYPE_CLIENT : NODE_TYPE_SERVER;
-
-                Boolean status = nodeStat.getValue();
-
-                if (!status) {
-                    hasFailed[0] = true;
-                    return Arrays.asList(id, consId, isClient, remoteId, remoteConsId, nodeType);
-                }
-
-                return null;
-            })
-            .filter(Objects::nonNull)
-            .collect(Collectors.toList());
-
-            table.addAll(row);
-        }
-
-        if (hasFailed[0])
-            logger.info("There is no connectivity between the following nodes:\n" + formatAsTable(table));
-        else
-            logger.info("There are no connectivity problems.");
-    }
-
-    /**
-     * Format output as a table
-     * @param rows table rows.
-     * @return formatted string.
-     */
-    public static String formatAsTable(List<List<String>> rows) {
-        int[] maxLengths = new int[rows.get(0).size()];
-
-        for (List<String> row : rows) {
-            for (int i = 0; i < row.size(); i++)
-                maxLengths[i] = Math.max(maxLengths[i], row.get(i).length());
-        }
-
-        StringBuilder formatBuilder = new StringBuilder();
-
-        for (int maxLength : maxLengths)
-            formatBuilder.append("%-").append(maxLength + 2).append("s");
-
-        String format = formatBuilder.toString();
-
-        StringBuilder result = new StringBuilder();
-
-        for (List<String> row : rows)
-            result.append(String.format(format, row.toArray(new String[0]))).append("\n");
-
-        return result.toString();
-    }
-
-    /** {@inheritDoc} */
-    @Override public Void arg() {
-        return null;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger logger) {
-        logger.info("View connectvity state of all nodes in cluster");
-        logger.info(join(" ",
-            UTILITY_NAME, DIAGNOSTIC, CONNECTIVITY,
-            "// Prints info about connectivity between nodes"));
-        logger.info("");
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return CONNECTIVITY.name();
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticCommand.java
index 60534c4..c0e59a3 100644
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticCommand.java
+++ b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticCommand.java
@@ -29,7 +29,6 @@ import static org.apache.ignite.internal.commandline.CommandHandler.UTILITY_NAME
 import static org.apache.ignite.internal.commandline.CommandList.DIAGNOSTIC;
 import static org.apache.ignite.internal.commandline.CommandLogger.INDENT;
 import static org.apache.ignite.internal.commandline.CommandLogger.join;
-import static org.apache.ignite.internal.commandline.diagnostic.DiagnosticSubCommand.CONNECTIVITY;
 import static org.apache.ignite.internal.commandline.diagnostic.DiagnosticSubCommand.HELP;
 import static org.apache.ignite.internal.commandline.diagnostic.DiagnosticSubCommand.PAGE_LOCKS;
 
@@ -78,7 +77,6 @@ public class DiagnosticCommand extends AbstractCommand<DiagnosticSubCommand> {
 
         switch (cmd) {
             case HELP:
-            case CONNECTIVITY:
                 break;
 
             case PAGE_LOCKS:
@@ -111,7 +109,6 @@ public class DiagnosticCommand extends AbstractCommand<DiagnosticSubCommand> {
      */
     private void printDiagnosticHelp(Logger logger) {
         logger.info(INDENT + join(" ", UTILITY_NAME, DIAGNOSTIC, PAGE_LOCKS + " - dump page locks info."));
-        logger.info(INDENT + join(" ", UTILITY_NAME, DIAGNOSTIC, CONNECTIVITY + " - show connectivity state."));
 
         logger.info(INDENT + "Subcommands:");
 
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java
index aac7ddb..2229fde 100644
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java
+++ b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java
@@ -27,10 +27,7 @@ public enum DiagnosticSubCommand {
     HELP("help", null),
 
     /** */
-    PAGE_LOCKS("pageLocks", new PageLocksCommand()),
-
-    /** */
-    CONNECTIVITY("connectivity", new ConnectivityCommand());
+    PAGE_LOCKS("pageLocks", new PageLocksCommand());
 
     /** Diagnostic command name. */
     private final String name;
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/CacheGroupEncryptionCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/CacheGroupEncryptionCommand.java
deleted file mode 100644
index d4c09b8..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/CacheGroupEncryptionCommand.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.logging.Logger;
-import org.apache.ignite.IgniteException;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.CommandArgIterator;
-import org.apache.ignite.internal.commandline.CommandList;
-import org.apache.ignite.internal.commandline.CommandLogger;
-import org.apache.ignite.internal.util.typedef.F;
-import org.apache.ignite.internal.visor.encryption.VisorCacheGroupEncryptionTaskArg;
-import org.apache.ignite.internal.visor.encryption.VisorCacheGroupEncryptionTaskResult;
-import org.apache.ignite.internal.visor.encryption.VisorEncryptionKeyIdsTask;
-import org.apache.ignite.internal.visor.encryption.VisorReencryptionResumeTask;
-import org.apache.ignite.internal.visor.encryption.VisorReencryptionStatusTask;
-import org.apache.ignite.internal.visor.encryption.VisorReencryptionSuspendTask;
-
-import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
-import static org.apache.ignite.internal.commandline.CommandLogger.DOUBLE_INDENT;
-import static org.apache.ignite.internal.commandline.CommandLogger.INDENT;
-import static org.apache.ignite.internal.commandline.TaskExecutor.BROADCAST_UUID;
-import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.CACHE_GROUP_KEY_IDS;
-
-/**
- * Base cache group encryption multinode subcommand.
- *
- * @param <T> Command result type.
- */
-public abstract class CacheGroupEncryptionCommand<T> extends AbstractCommand<VisorCacheGroupEncryptionTaskArg> {
-    /** Cache group reencryption task argument. */
-    private VisorCacheGroupEncryptionTaskArg taskArg;
-
-    /** {@inheritDoc} */
-    @Override public VisorCacheGroupEncryptionTaskArg arg() {
-        return taskArg;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        String grpName = argIter.nextArg("Cache group name is expected.");
-
-        if (argIter.hasNextSubArg())
-            throw new IllegalArgumentException("Unexpected command argument: " + argIter.peekNextArg());
-
-        taskArg = new VisorCacheGroupEncryptionTaskArg(grpName);
-    }
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            VisorCacheGroupEncryptionTaskResult<T> res = executeTaskByNameOnNode(
-                client,
-                visorTaskName(),
-                taskArg,
-                BROADCAST_UUID,
-                clientCfg
-            );
-
-            printResults(res, taskArg.groupName(), log);
-
-            return res;
-        }
-        catch (Throwable e) {
-            log.severe("Failed to perform operation.");
-            log.severe(CommandLogger.errorMessage(e));
-
-            throw e;
-        }
-    }
-
-    /**
-     * @param res Response.
-     * @param grpName Cache group name.
-     * @param log Logger.
-     */
-    protected void printResults(VisorCacheGroupEncryptionTaskResult<T> res, String grpName, Logger log) {
-        Map<UUID, IgniteException> exceptions = res.exceptions();
-
-        for (Map.Entry<UUID, IgniteException> entry : exceptions.entrySet()) {
-            log.info(INDENT + "Node " + entry.getKey() + ":");
-
-            log.info(String.format("%sfailed to execute command for the cache group \"%s\": %s.",
-                DOUBLE_INDENT, grpName, entry.getValue().getMessage()));
-        }
-
-        Map<UUID, T> results = res.results();
-
-        for (Map.Entry<UUID, T> entry : results.entrySet()) {
-            log.info(INDENT + "Node " + entry.getKey() + ":");
-
-            printNodeResult(entry.getValue(), grpName, log);
-        }
-    }
-
-    /**
-     * @param res Response.
-     * @param grpName Cache group name.
-     * @param log Logger.
-     */
-    protected abstract void printNodeResult(T res, String grpName, Logger log);
-
-    /**
-     * @return Visor task name.
-     */
-    protected abstract String visorTaskName();
-
-    /** Subcommand to display re-encryption status of the cache group. */
-    protected static class ReencryptionStatus extends CacheGroupEncryptionCommand<Long> {
-        /** {@inheritDoc} */
-        @Override protected void printNodeResult(Long bytesLeft, String grpName, Logger log) {
-            if (bytesLeft == -1)
-                log.info(DOUBLE_INDENT + "re-encryption completed or not required");
-            else if (bytesLeft == 0)
-                log.info(DOUBLE_INDENT + "re-encryption will be completed after the next checkpoint");
-            else
-                log.info(String.format("%s%d KB of data left for re-encryption", DOUBLE_INDENT, bytesLeft / 1024));
-        }
-
-        /** {@inheritDoc} */
-        @Override protected String visorTaskName() {
-            return VisorReencryptionStatusTask.class.getName();
-        }
-
-        /** {@inheritDoc} */
-        @Override public String name() {
-            return EncryptionSubcommands.REENCRYPTION_STATUS.text().toUpperCase();
-        }
-
-        /** {@inheritDoc} */
-        @Override public void printUsage(Logger log) {
-            Command.usage(log, "Display re-encryption status of the cache group:", CommandList.ENCRYPTION,
-                EncryptionSubcommands.REENCRYPTION_STATUS.toString(), "cacheGroupName");
-        }
-    }
-
-    /** Subcommand to view current encryption key IDs of the cache group. */
-    protected static class CacheKeyIds extends CacheGroupEncryptionCommand<List<Integer>> {
-        /** {@inheritDoc} */
-        @Override protected void printResults(
-            VisorCacheGroupEncryptionTaskResult<List<Integer>> res,
-            String grpName,
-            Logger log
-        ) {
-            log.info("Encryption key identifiers for cache: " + grpName);
-
-            super.printResults(res, grpName, log);
-        }
-
-        /** {@inheritDoc} */
-        @Override protected void printNodeResult(List<Integer> keyIds, String grpName, Logger log) {
-            if (F.isEmpty(keyIds)) {
-                log.info(DOUBLE_INDENT + "---");
-
-                return;
-            }
-
-            for (int i = 0; i < keyIds.size(); i++)
-                log.info(DOUBLE_INDENT + keyIds.get(i) + (i == 0 ? " (active)" : ""));
-        }
-
-        /** {@inheritDoc} */
-        @Override protected String visorTaskName() {
-            return VisorEncryptionKeyIdsTask.class.getName();
-        }
-
-        /** {@inheritDoc} */
-        @Override public String name() {
-            return CACHE_GROUP_KEY_IDS.text().toUpperCase();
-        }
-
-        /** {@inheritDoc} */
-        @Override public void printUsage(Logger log) {
-            Command.usage(log, "View encryption key identifiers of the cache group:", ENCRYPTION,
-                CACHE_GROUP_KEY_IDS.toString(), "cacheGroupName");
-        }
-    }
-
-    /** Subcommand to suspend re-encryption of the cache group. */
-    protected static class SuspendReencryption extends CacheGroupEncryptionCommand<Boolean> {
-        /** {@inheritDoc} */
-        @Override protected String visorTaskName() {
-            return VisorReencryptionSuspendTask.class.getName();
-        }
-
-        /** {@inheritDoc} */
-        @Override public String name() {
-            return EncryptionSubcommands.REENCRYPTION_SUSPEND.text().toUpperCase();
-        }
-
-        /** {@inheritDoc} */
-        @Override public void printUsage(Logger log) {
-            Command.usage(log, "Suspend re-encryption of the cache group:", CommandList.ENCRYPTION,
-                EncryptionSubcommands.REENCRYPTION_SUSPEND.toString(), "cacheGroupName");
-        }
-
-        /** {@inheritDoc} */
-        @Override protected void printNodeResult(Boolean success, String grpName, Logger log) {
-            log.info(String.format("%sre-encryption of the cache group \"%s\" has %sbeen suspended.",
-                DOUBLE_INDENT, grpName, (success ? "" : "already ")));
-        }
-
-        /** {@inheritDoc} */
-        @Override protected void printResults(
-            VisorCacheGroupEncryptionTaskResult<Boolean> res,
-            String grpName,
-            Logger log
-        ) {
-            super.printResults(res, grpName, log);
-
-            log.info("");
-            log.info("Note: the re-encryption suspend status is not persisted, re-encryption will be started " +
-                "automatically after the node is restarted.");
-            log.info("");
-        }
-    }
-
-    /** Subcommand to resume re-encryption of the cache group. */
-    protected static class ResumeReencryption extends CacheGroupEncryptionCommand<Boolean> {
-        /** {@inheritDoc} */
-        @Override protected String visorTaskName() {
-            return VisorReencryptionResumeTask.class.getName();
-        }
-
-        /** {@inheritDoc} */
-        @Override public String name() {
-            return EncryptionSubcommands.REENCRYPTION_RESUME.text().toUpperCase();
-        }
-
-        /** {@inheritDoc} */
-        @Override public void printUsage(Logger log) {
-            Command.usage(log, "Resume re-encryption of the cache group:", CommandList.ENCRYPTION,
-                EncryptionSubcommands.REENCRYPTION_RESUME.toString(), "cacheGroupName");
-        }
-
-        /** {@inheritDoc} */
-        @Override protected void printNodeResult(Boolean success, String grpName, Logger log) {
-            log.info(String.format("%sre-encryption of the cache group \"%s\" has %sbeen resumed.",
-                DOUBLE_INDENT, grpName, (success ? "" : "already ")));
-        }
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeCacheGroupKeyCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeCacheGroupKeyCommand.java
deleted file mode 100644
index 8518e5d..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeCacheGroupKeyCommand.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.logging.Logger;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.CommandArgIterator;
-import org.apache.ignite.internal.commandline.CommandLogger;
-import org.apache.ignite.internal.visor.encryption.VisorCacheGroupEncryptionTaskArg;
-import org.apache.ignite.internal.visor.encryption.VisorChangeCacheGroupKeyTask;
-
-import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
-import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.CHANGE_CACHE_GROUP_KEY;
-
-/**
- * Change cache group key encryption subcommand.
- */
-public class ChangeCacheGroupKeyCommand extends AbstractCommand<VisorCacheGroupEncryptionTaskArg> {
-    /** Change cache group key task argument. */
-    private VisorCacheGroupEncryptionTaskArg taskArg;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            executeTaskByNameOnNode(
-                client,
-                VisorChangeCacheGroupKeyTask.class.getName(),
-                taskArg,
-                null,
-                clientCfg
-            );
-
-            log.info("The encryption key has been changed for the cache group \"" + taskArg.groupName() + "\".");
-
-            return null;
-        }
-        catch (Throwable e) {
-            log.severe("Failed to perform operation.");
-            log.severe(CommandLogger.errorMessage(e));
-
-            throw e;
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public String confirmationPrompt() {
-        return "Warning: the command will change the encryption key of the cache group. Joining a node during " +
-            "the key change process is prohibited and will be rejected.";
-    }
-
-    /** {@inheritDoc} */
-    @Override public VisorCacheGroupEncryptionTaskArg arg() {
-        return taskArg;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        String argCacheGrpName = argIter.nextArg("Cache group name is expected.");
-
-        taskArg = new VisorCacheGroupEncryptionTaskArg(argCacheGrpName);
-
-        if (argIter.hasNextSubArg())
-            throw new IllegalArgumentException("Unexpected command argument: " + argIter.peekNextArg());
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger log) {
-        Command.usage(log, "Change the encryption key of the cache group:", ENCRYPTION,
-            CHANGE_CACHE_GROUP_KEY.toString(), "cacheGroupName");
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return CHANGE_CACHE_GROUP_KEY.text().toUpperCase();
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeMasterKeyCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeMasterKeyCommand.java
deleted file mode 100644
index a48dc4b..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ChangeMasterKeyCommand.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.logging.Logger;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.CommandArgIterator;
-import org.apache.ignite.internal.commandline.CommandLogger;
-import org.apache.ignite.internal.visor.encryption.VisorChangeMasterKeyTask;
-
-import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
-import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.CHANGE_MASTER_KEY;
-
-/**
- * Change master key encryption subcommand.
- */
-public class ChangeMasterKeyCommand extends AbstractCommand<String> {
-    /** New master key name. */
-    private String argMasterKeyName;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            String resMsg = executeTaskByNameOnNode(
-                client,
-                VisorChangeMasterKeyTask.class.getName(),
-                argMasterKeyName,
-                null,
-                clientCfg
-            );
-
-            log.info(resMsg);
-
-            return resMsg;
-        }
-        catch (Throwable e) {
-            log.severe("Failed to perform operation.");
-            log.severe(CommandLogger.errorMessage(e));
-
-            throw e;
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public String confirmationPrompt() {
-        return "Warning: the command will change the master key. Cache start and node join during the key change " +
-            "process is prohibited and will be rejected.";
-    }
-
-    /** {@inheritDoc} */
-    @Override public String arg() {
-        return argMasterKeyName;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        argMasterKeyName = argIter.nextArg("Expected master key name.");
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger log) {
-        Command.usage(log, "Change the master key:", ENCRYPTION, CHANGE_MASTER_KEY.toString(), "newMasterKeyName");
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return CHANGE_MASTER_KEY.text().toUpperCase();
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommand.java
new file mode 100644
index 0000000..5cbd723
--- /dev/null
+++ b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommand.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.commandline.encryption;
+
+import java.util.logging.Logger;
+import org.apache.ignite.internal.client.GridClient;
+import org.apache.ignite.internal.client.GridClientConfiguration;
+import org.apache.ignite.internal.commandline.AbstractCommand;
+import org.apache.ignite.internal.commandline.Command;
+import org.apache.ignite.internal.commandline.CommandArgIterator;
+import org.apache.ignite.internal.commandline.CommandLogger;
+import org.apache.ignite.internal.visor.encryption.VisorChangeMasterKeyTask;
+import org.apache.ignite.internal.visor.encryption.VisorGetMasterKeyNameTask;
+
+import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
+import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
+import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommand.CHANGE_MASTER_KEY;
+import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommand.GET_MASTER_KEY_NAME;
+import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommand.of;
+
+/**
+ * Commands associated with encryption features.
+ *
+ * @see EncryptionSubcommand
+ */
+public class EncryptionCommand extends AbstractCommand<Object> {
+    /** Subcommand. */
+    EncryptionSubcommand cmd;
+
+    /** The task name. */
+    String taskName;
+
+    /** The task arguments. */
+    Object taskArgs;
+
+    /** {@inheritDoc} */
+    @Override public Object execute(GridClientConfiguration clientCfg, Logger logger) throws Exception {
+        try (GridClient client = Command.startClient(clientCfg)) {
+            String res = executeTaskByNameOnNode(
+                client,
+                taskName,
+                taskArgs,
+                null,
+                clientCfg
+            );
+
+            logger.info(res);
+
+            return res;
+        }
+        catch (Throwable e) {
+            logger.severe("Failed to perform operation.");
+            logger.severe(CommandLogger.errorMessage(e));
+
+            throw e;
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public String confirmationPrompt() {
+        if (CHANGE_MASTER_KEY == cmd) {
+            return "Warning: the command will change the master key. Cache start and node join during the key change " +
+                "process is prohibited and will be rejected.";
+        }
+
+        return null;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void parseArguments(CommandArgIterator argIter) {
+        EncryptionSubcommand cmd = of(argIter.nextArg("Expected encryption action."));
+
+        if (cmd == null)
+            throw new IllegalArgumentException("Expected correct encryption action.");
+
+        switch (cmd) {
+            case GET_MASTER_KEY_NAME:
+                taskName = VisorGetMasterKeyNameTask.class.getName();
+
+                taskArgs = null;
+
+                break;
+
+            case CHANGE_MASTER_KEY:
+                String masterKeyName = argIter.nextArg("Expected master key name.");
+
+                taskName = VisorChangeMasterKeyTask.class.getName();
+
+                taskArgs = masterKeyName;
+
+                break;
+
+            default:
+                throw new IllegalArgumentException("Unknown encryption subcommand: " + cmd);
+        }
+
+        this.cmd = cmd;
+    }
+
+    /** {@inheritDoc} */
+    @Override public Object arg() {
+        return taskArgs;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void printUsage(Logger logger) {
+        Command.usage(logger, "Print the current master key name:", ENCRYPTION, GET_MASTER_KEY_NAME.toString());
+        Command.usage(logger, "Change the master key:", ENCRYPTION, CHANGE_MASTER_KEY.toString(), "newMasterKeyName");
+    }
+
+    /** {@inheritDoc} */
+    @Override public String name() {
+        return ENCRYPTION.toCommandName();
+    }
+}
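
The EncryptionCommand class added above consolidates the two remaining master key subcommands and dispatches them to the corresponding Visor tasks. A minimal usage sketch based on its printUsage() output, assuming the standard bin/control.sh entry point; newMasterKeyName is a placeholder for a key name known to the configured EncryptionSpi:

    control.sh --encryption get_master_key_name
    control.sh --encryption change_master_key newMasterKeyName

As the confirmationPrompt() implementation indicates, the change_master_key invocation asks for confirmation before proceeding unless auto-confirmation (the --yes option referenced in the test base class further below) is supplied.
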
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommands.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommands.java
deleted file mode 100644
index fbae770..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionCommands.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.logging.Logger;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.CommandArgIterator;
-import org.apache.ignite.internal.commandline.CommandList;
-
-/**
- * Commands related to encryption functions.
- *
- * @see EncryptionSubcommands
- */
-public class EncryptionCommands extends AbstractCommand<EncryptionSubcommands> {
-    /** Subcommand. */
-    private EncryptionSubcommands cmd;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger logger) throws Exception {
-        return cmd.subcommand().execute(clientCfg, logger);
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        EncryptionSubcommands cmd = EncryptionSubcommands.of(argIter.nextArg("Expected encryption action."));
-
-        if (cmd == null)
-            throw new IllegalArgumentException("Expected correct encryption action.");
-
-        cmd.subcommand().parseArguments(argIter);
-
-        if (argIter.hasNextSubArg())
-            throw new IllegalArgumentException("Unexpected argument of --encryption subcommand: " + argIter.peekNextArg());
-
-        this.cmd = cmd;
-    }
-
-    /** {@inheritDoc} */
-    @Override public EncryptionSubcommands arg() {
-        return cmd;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger logger) {
-        for (EncryptionSubcommands cmd : EncryptionSubcommands.values())
-            cmd.subcommand().printUsage(logger);
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return CommandList.ENCRYPTION.toCommandName();
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommand.java
similarity index 54%
copy from modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java
copy to modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommand.java
index aac7ddb..3c47c02 100644
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/diagnostic/DiagnosticSubCommand.java
+++ b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommand.java
@@ -15,54 +15,36 @@
  * limitations under the License.
  */
 
-package org.apache.ignite.internal.commandline.diagnostic;
+package org.apache.ignite.internal.commandline.encryption;
 
-import org.apache.ignite.internal.commandline.Command;
+import org.jetbrains.annotations.Nullable;
 
 /**
+ * Set of encryption subcommands.
  *
+ * @see EncryptionCommand
  */
-public enum DiagnosticSubCommand {
-    /** */
-    HELP("help", null),
+public enum EncryptionSubcommand {
+    /** Subcommand to get the current master key name. */
+    GET_MASTER_KEY_NAME("get_master_key_name"),
 
-    /** */
-    PAGE_LOCKS("pageLocks", new PageLocksCommand()),
+    /** Subcommand to change the master key. */
+    CHANGE_MASTER_KEY("change_master_key");
 
-    /** */
-    CONNECTIVITY("connectivity", new ConnectivityCommand());
-
-    /** Diagnostic command name. */
+    /** Subcommand name. */
     private final String name;
 
-    /** Command instance for certain type. */
-    private final Command command;
-
-    /**
-     * @param name Command name.
-     * @param command Command handler.
-     */
-    DiagnosticSubCommand(
-        String name,
-        Command command
-    ) {
+    /** @param name Encryption subcommand name. */
+    EncryptionSubcommand(String name) {
         this.name = name;
-        this.command = command;
-    }
-
-    /**
-     * @return Subcommand realization.
-     */
-    public Command subcommand() {
-        return command;
     }
 
     /**
-     * @param text Command text.
-     * @return Command for the text.
+     * @param text Command text (case insensitive).
+     * @return Command for the text. {@code Null} if there is no such command.
      */
-    public static DiagnosticSubCommand of(String text) {
-        for (DiagnosticSubCommand cmd : DiagnosticSubCommand.values()) {
+     @Nullable public static EncryptionSubcommand of(String text) {
+        for (EncryptionSubcommand cmd : EncryptionSubcommand.values()) {
             if (cmd.name.equalsIgnoreCase(text))
                 return cmd;
         }
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommands.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommands.java
deleted file mode 100644
index c8d0941..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/EncryptionSubcommands.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import org.apache.ignite.internal.commandline.Command;
-import org.jetbrains.annotations.Nullable;
-
-/**
- * Set of encryption subcommands.
- *
- * @see EncryptionCommands
- */
-public enum EncryptionSubcommands {
-    /** Subcommand to get the current master key name. */
-    GET_MASTER_KEY_NAME("get_master_key_name", new GetMasterKeyNameCommand()),
-
-    /** Subcommand to change the master key. */
-    CHANGE_MASTER_KEY("change_master_key", new ChangeMasterKeyCommand()),
-
-    /** Subcommand to change the current encryption key for specified cache group. */
-    CHANGE_CACHE_GROUP_KEY("change_cache_key", new ChangeCacheGroupKeyCommand()),
-
-    /** Subcommand to view current encryption key IDs of the cache group. */
-    CACHE_GROUP_KEY_IDS("cache_key_ids", new CacheGroupEncryptionCommand.CacheKeyIds()),
-
-    /** Subcommand to display re-encryption status of the cache group. */
-    REENCRYPTION_STATUS("reencryption_status", new CacheGroupEncryptionCommand.ReencryptionStatus()),
-
-    /** Subcommand to suspend re-encryption of the cache group. */
-    REENCRYPTION_SUSPEND("suspend_reencryption", new CacheGroupEncryptionCommand.SuspendReencryption()),
-
-    /** Subcommand to resume re-encryption of the cache group. */
-    REENCRYPTION_RESUME("resume_reencryption", new CacheGroupEncryptionCommand.ResumeReencryption()),
-
-    /** Subcommand to view/change cache group re-encryption rate limit. */
-    REENCRYPTION_RATE("reencryption_rate_limit", new ReencryptionRateCommand());
-
-    /** Subcommand name. */
-    private final String name;
-
-    /** Command. */
-    private final Command<?> cmd;
-
-    /**
-     * @param name Encryption subcommand name.
-     * @param cmd Command implementation.
-     */
-    EncryptionSubcommands(String name, Command<?> cmd) {
-        this.name = name;
-        this.cmd = cmd;
-    }
-
-    /**
-     * @return Name.
-     */
-    public String text() {
-        return name;
-    }
-
-    /**
-     * @return Cache subcommand implementation.
-     */
-    public Command<?> subcommand() {
-        return cmd;
-    }
-
-    /**
-     * @param text Command text (case insensitive).
-     * @return Command for the text. {@code Null} if there is no such command.
-     */
-     @Nullable public static EncryptionSubcommands of(String text) {
-        for (EncryptionSubcommands cmd : values()) {
-            if (cmd.name.equalsIgnoreCase(text))
-                return cmd;
-        }
-
-        return null;
-    }
-
-    /** {@inheritDoc} */
-    @Override public String toString() {
-        return name;
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/GetMasterKeyNameCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/GetMasterKeyNameCommand.java
deleted file mode 100644
index 02bb8ed..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/GetMasterKeyNameCommand.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.logging.Logger;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.CommandLogger;
-import org.apache.ignite.internal.visor.encryption.VisorGetMasterKeyNameTask;
-
-import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
-import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.GET_MASTER_KEY_NAME;
-
-/**
- * Get master key name encryption subcommand.
- */
-public class GetMasterKeyNameCommand extends AbstractCommand<Void> {
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            String masterKeyName = executeTaskByNameOnNode(
-                client,
-                VisorGetMasterKeyNameTask.class.getName(),
-                null,
-                null,
-                clientCfg
-            );
-
-            log.info(masterKeyName);
-
-            return masterKeyName;
-        }
-        catch (Throwable e) {
-            log.severe("Failed to perform operation.");
-            log.severe(CommandLogger.errorMessage(e));
-
-            throw e;
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public Void arg() {
-        return null;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger log) {
-        Command.usage(log, "Print the current master key name:", ENCRYPTION, GET_MASTER_KEY_NAME.toString());
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return GET_MASTER_KEY_NAME.text().toUpperCase();
-    }
-}
diff --git a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ReencryptionRateCommand.java b/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ReencryptionRateCommand.java
deleted file mode 100644
index 7eb9f79..0000000
--- a/modules/control-utility/src/main/java/org/apache/ignite/internal/commandline/encryption/ReencryptionRateCommand.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.commandline.encryption;
-
-import java.util.Map;
-import java.util.UUID;
-import java.util.logging.Logger;
-import org.apache.ignite.IgniteException;
-import org.apache.ignite.internal.client.GridClient;
-import org.apache.ignite.internal.client.GridClientConfiguration;
-import org.apache.ignite.internal.commandline.AbstractCommand;
-import org.apache.ignite.internal.commandline.Command;
-import org.apache.ignite.internal.commandline.CommandArgIterator;
-import org.apache.ignite.internal.commandline.CommandLogger;
-import org.apache.ignite.internal.visor.encryption.VisorCacheGroupEncryptionTaskResult;
-import org.apache.ignite.internal.visor.encryption.VisorReencryptionRateTask;
-import org.apache.ignite.internal.visor.encryption.VisorReencryptionRateTaskArg;
-
-import static java.util.Collections.singletonMap;
-import static org.apache.ignite.internal.commandline.CommandList.ENCRYPTION;
-import static org.apache.ignite.internal.commandline.CommandLogger.DOUBLE_INDENT;
-import static org.apache.ignite.internal.commandline.CommandLogger.INDENT;
-import static org.apache.ignite.internal.commandline.CommandLogger.optional;
-import static org.apache.ignite.internal.commandline.TaskExecutor.BROADCAST_UUID;
-import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.REENCRYPTION_RATE;
-
-/**
- * View/change cache group re-encryption rate limit subcommand.
- */
-public class ReencryptionRateCommand extends AbstractCommand<VisorReencryptionRateTaskArg> {
-    /** Re-encryption rate task argument. */
-    private VisorReencryptionRateTaskArg taskArg;
-
-    /** {@inheritDoc} */
-    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
-        try (GridClient client = Command.startClient(clientCfg)) {
-            VisorCacheGroupEncryptionTaskResult<Double> res = executeTaskByNameOnNode(
-                client,
-                VisorReencryptionRateTask.class.getName(),
-                taskArg,
-                BROADCAST_UUID,
-                clientCfg
-            );
-
-            Map<UUID, IgniteException> exceptions = res.exceptions();
-
-            for (Map.Entry<UUID, IgniteException> entry : exceptions.entrySet()) {
-                log.info(INDENT + "Node " + entry.getKey() + ":");
-                log.info(DOUBLE_INDENT +
-                    "failed to get/set re-encryption rate limit: " + entry.getValue().getMessage());
-            }
-
-            Map<UUID, Double> results = res.results();
-            boolean read = taskArg.rate() == null;
-
-            for (Map.Entry<UUID, Double> entry : results.entrySet()) {
-                log.info(INDENT + "Node " + entry.getKey() + ":");
-
-                double rateLimit = read ? entry.getValue() : taskArg.rate();
-
-                if (rateLimit == 0)
-                    log.info(DOUBLE_INDENT + "re-encryption rate is not limited.");
-                else {
-                    log.info(String.format("%sre-encryption rate %s limited to %.2f MB/s.",
-                        DOUBLE_INDENT, (read ? "is" : "has been"), rateLimit));
-                }
-            }
-
-            if (read)
-                return null;
-
-            log.info("");
-            log.info("Note: the changed value of the re-encryption rate limit is not persisted. " +
-                "When the node is restarted, the value will be set from the configuration.");
-            log.info("");
-
-            return null;
-        }
-        catch (Throwable e) {
-            log.severe("Failed to perform operation.");
-            log.severe(CommandLogger.errorMessage(e));
-
-            throw e;
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public VisorReencryptionRateTaskArg arg() {
-        return taskArg;
-    }
-
-    /** {@inheritDoc} */
-    @Override public void parseArguments(CommandArgIterator argIter) {
-        Double rateLimit = null;
-
-        while (argIter.hasNextSubArg()) {
-            String rateLimitArg = argIter.nextArg("Expected decimal value for re-encryption rate.");
-
-            try {
-                rateLimit = Double.parseDouble(rateLimitArg);
-            }
-            catch (NumberFormatException e) {
-                throw new IllegalArgumentException("Failed to parse command argument. Decimal value expected.", e);
-            }
-        }
-
-        taskArg = new VisorReencryptionRateTaskArg(rateLimit);
-    }
-
-    /** {@inheritDoc} */
-    @Override public void printUsage(Logger log) {
-        Command.usage(log, "View/change re-encryption rate limit:", ENCRYPTION,
-            singletonMap("new_limit", "Decimal value to change re-encryption rate limit (MB/s)."),
-            REENCRYPTION_RATE.toString(), optional("new_limit"));
-    }
-
-    /** {@inheritDoc} */
-    @Override public String name() {
-        return REENCRYPTION_RATE.text().toUpperCase();
-    }
-}
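
The re-encryption rate subcommand removed above accepted an optional decimal limit in megabytes per second and, when the argument was omitted, reported the current per-node limit (the task is broadcast to all nodes). A sketch reconstructed from its printUsage() call, assuming the standard bin/control.sh entry point; 2.5 is an arbitrary example value:

    control.sh --encryption reencryption_rate_limit
    control.sh --encryption reencryption_rate_limit 2.5

As the removed code itself logs, a limit set this way is not persisted and reverts to the configured value on node restart.
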
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/internal/commandline/CommandHandlerParsingTest.java b/modules/control-utility/src/test/java/org/apache/ignite/internal/commandline/CommandHandlerParsingTest.java
index b1ab5f8..b3e51cd 100644
--- a/modules/control-utility/src/test/java/org/apache/ignite/internal/commandline/CommandHandlerParsingTest.java
+++ b/modules/control-utility/src/test/java/org/apache/ignite/internal/commandline/CommandHandlerParsingTest.java
@@ -1033,7 +1033,6 @@ public class CommandHandlerParsingTest {
             cmd == CommandList.WARM_UP ||
             cmd == CommandList.PROPERTY ||
             cmd == CommandList.SYSTEM_VIEW ||
-            cmd == CommandList.METRIC ||
-            cmd == CommandList.DEFRAGMENTATION;
+            cmd == CommandList.METRIC;
     }
 }
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/testsuites/IgniteControlUtilityTestSuite.java b/modules/control-utility/src/test/java/org/apache/ignite/testsuites/IgniteControlUtilityTestSuite.java
index 6466487..dbccfa5 100644
--- a/modules/control-utility/src/test/java/org/apache/ignite/testsuites/IgniteControlUtilityTestSuite.java
+++ b/modules/control-utility/src/test/java/org/apache/ignite/testsuites/IgniteControlUtilityTestSuite.java
@@ -25,7 +25,6 @@ import org.apache.ignite.util.GridCommandHandlerBrokenIndexTest;
 import org.apache.ignite.util.GridCommandHandlerCheckIndexesInlineSizeTest;
 import org.apache.ignite.util.GridCommandHandlerClusterByClassTest;
 import org.apache.ignite.util.GridCommandHandlerClusterByClassWithSSLTest;
-import org.apache.ignite.util.GridCommandHandlerDefragmentationTest;
 import org.apache.ignite.util.GridCommandHandlerIndexForceRebuildTest;
 import org.apache.ignite.util.GridCommandHandlerIndexListTest;
 import org.apache.ignite.util.GridCommandHandlerIndexRebuildStatusTest;
@@ -34,7 +33,6 @@ import org.apache.ignite.util.GridCommandHandlerIndexingClusterByClassTest;
 import org.apache.ignite.util.GridCommandHandlerIndexingClusterByClassWithSSLTest;
 import org.apache.ignite.util.GridCommandHandlerIndexingTest;
 import org.apache.ignite.util.GridCommandHandlerIndexingWithSSLTest;
-import org.apache.ignite.util.GridCommandHandlerInterruptCommandTest;
 import org.apache.ignite.util.GridCommandHandlerMetadataTest;
 import org.apache.ignite.util.GridCommandHandlerPropertiesTest;
 import org.apache.ignite.util.GridCommandHandlerSslTest;
@@ -69,7 +67,6 @@ import org.junit.runners.Suite;
     GridCommandHandlerIndexingClusterByClassWithSSLTest.class,
     GridCommandHandlerIndexingCheckSizeTest.class,
     GridCommandHandlerCheckIndexesInlineSizeTest.class,
-    GridCommandHandlerInterruptCommandTest.class,
     GridCommandHandlerMetadataTest.class,
 
     KillCommandsCommandShTest.class,
@@ -85,8 +82,6 @@ import org.junit.runners.Suite;
 
     GridCommandHandlerPropertiesTest.class,
 
-    GridCommandHandlerDefragmentationTest.class,
-
     SystemViewCommandTest.class,
     MetricCommandTest.class
 })
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerAbstractTest.java b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerAbstractTest.java
index 8dfbb37..bc31ce7 100644
--- a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerAbstractTest.java
+++ b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerAbstractTest.java
@@ -40,7 +40,6 @@ import org.apache.ignite.configuration.CacheConfiguration;
 import org.apache.ignite.configuration.ConnectorConfiguration;
 import org.apache.ignite.configuration.DataRegionConfiguration;
 import org.apache.ignite.configuration.DataStorageConfiguration;
-import org.apache.ignite.configuration.EncryptionConfiguration;
 import org.apache.ignite.configuration.IgniteConfiguration;
 import org.apache.ignite.configuration.WALMode;
 import org.apache.ignite.internal.IgniteEx;
@@ -69,8 +68,6 @@ import static java.util.Arrays.asList;
 import static java.util.Objects.nonNull;
 import static org.apache.ignite.IgniteSystemProperties.IGNITE_ENABLE_EXPERIMENTAL_COMMAND;
 import static org.apache.ignite.configuration.DataStorageConfiguration.DFLT_CHECKPOINT_FREQ;
-import static org.apache.ignite.configuration.EncryptionConfiguration.DFLT_REENCRYPTION_BATCH_SIZE;
-import static org.apache.ignite.configuration.EncryptionConfiguration.DFLT_REENCRYPTION_RATE_MBPS;
 import static org.apache.ignite.internal.encryption.AbstractEncryptionTest.KEYSTORE_PASSWORD;
 import static org.apache.ignite.internal.encryption.AbstractEncryptionTest.KEYSTORE_PATH;
 import static org.apache.ignite.internal.processors.cache.verify.VerifyBackupPartitionsDumpTask.IDLE_DUMP_FILE_PREFIX;
@@ -88,9 +85,6 @@ public abstract class GridCommandHandlerAbstractTest extends GridCommonAbstractT
     /** */
     protected static final String CLIENT_NODE_NAME_PREFIX = "client";
 
-    /** */
-    protected static final String DAEMON_NODE_NAME_PREFIX = "daemon";
-
     /** Option is used for auto confirmation. */
     protected static final String CMD_AUTO_CONFIRMATION = "--yes";
 
@@ -119,13 +113,7 @@ public abstract class GridCommandHandlerAbstractTest extends GridCommonAbstractT
     protected boolean autoConfirmation = true;
 
     /** {@code True} if encription is enabled. */
-    protected boolean encryptionEnabled;
-
-    /**  Re-encryption rate limit in megabytes per second. */
-    protected double reencryptSpeed = DFLT_REENCRYPTION_RATE_MBPS;
-
-    /** The number of pages that is scanned during re-encryption under checkpoint lock. */
-    protected int reencryptBatchSize = DFLT_REENCRYPTION_BATCH_SIZE;
+    protected boolean encriptionEnabled;
 
     /** Last operation result. */
     protected Object lastOperationResult;
@@ -183,7 +171,7 @@ public abstract class GridCommandHandlerAbstractTest extends GridCommonAbstractT
 
         testOut.reset();
 
-        encryptionEnabled = false;
+        encriptionEnabled = false;
 
         GridClientFactory.stopAll(false);
     }
@@ -245,22 +233,13 @@ public abstract class GridCommandHandlerAbstractTest extends GridCommonAbstractT
 
         cfg.setClientMode(igniteInstanceName.startsWith(CLIENT_NODE_NAME_PREFIX));
 
-        cfg.setDaemon(igniteInstanceName.startsWith(DAEMON_NODE_NAME_PREFIX));
-
-        if (encryptionEnabled) {
+        if (encriptionEnabled) {
             KeystoreEncryptionSpi encSpi = new KeystoreEncryptionSpi();
 
             encSpi.setKeyStorePath(KEYSTORE_PATH);
             encSpi.setKeyStorePassword(KEYSTORE_PASSWORD.toCharArray());
 
             cfg.setEncryptionSpi(encSpi);
-
-            EncryptionConfiguration encCfg = new EncryptionConfiguration();
-
-            encCfg.setReencryptionRateLimit(reencryptSpeed);
-            encCfg.setReencryptionBatchSize(reencryptBatchSize);
-
-            dsCfg.setEncryptionConfiguration(encCfg);
         }
 
         return cfg;
@@ -439,8 +418,7 @@ public abstract class GridCommandHandlerAbstractTest extends GridCommonAbstractT
 
         CacheConfiguration<?, ?> ccfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME)
             .setAffinity(new RendezvousAffinityFunction(false, partitions))
-            .setBackups(1)
-            .setEncryptionEnabled(encryptionEnabled);
+            .setBackups(1);
 
         if (filter != null)
             ccfg.setNodeFilter(filter);
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerDefragmentationTest.java b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerDefragmentationTest.java
deleted file mode 100644
index c2ea3c2..0000000
--- a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerDefragmentationTest.java
+++ /dev/null
@@ -1,470 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.util;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.function.UnaryOperator;
-import java.util.logging.Formatter;
-import java.util.logging.LogRecord;
-import java.util.logging.Logger;
-import java.util.logging.StreamHandler;
-import java.util.regex.Pattern;
-import org.apache.ignite.Ignite;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.cluster.ClusterState;
-import org.apache.ignite.configuration.DataStorageConfiguration;
-import org.apache.ignite.configuration.IgniteConfiguration;
-import org.apache.ignite.internal.IgniteEx;
-import org.apache.ignite.internal.IgniteInternalFuture;
-import org.apache.ignite.internal.IgniteNodeAttributes;
-import org.apache.ignite.internal.commandline.CommandHandler;
-import org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager;
-import org.apache.ignite.internal.processors.cache.persistence.defragmentation.maintenance.DefragmentationParameters;
-import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory;
-import org.apache.ignite.maintenance.MaintenanceTask;
-import org.apache.ignite.testframework.GridTestUtils;
-import org.apache.ignite.testframework.ListeningTestLogger;
-import org.apache.ignite.testframework.LogListener;
-import org.junit.Test;
-
-import static org.apache.ignite.cluster.ClusterState.ACTIVE;
-import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_INVALID_ARGUMENTS;
-import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_OK;
-
-/** */
-public class GridCommandHandlerDefragmentationTest extends GridCommandHandlerClusterPerMethodAbstractTest {
-    /** */
-    private static CountDownLatch blockCdl;
-
-    /** */
-    private static CountDownLatch waitCdl;
-
-    /** {@inheritDoc} */
-    @Override protected void beforeTest() throws Exception {
-        super.beforeTest();
-
-        stopAllGrids();
-
-        cleanPersistenceDir();
-    }
-
-    /** {@inheritDoc} */
-    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
-        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
-
-        cfg.getDataStorageConfiguration().setWalSegmentSize(512 * 1024).setWalSegments(3);
-
-        return cfg;
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDefragmentationSchedule() throws Exception {
-        Ignite ignite = startGrids(2);
-
-        ignite.cluster().state(ACTIVE);
-
-        assertEquals(EXIT_CODE_INVALID_ARGUMENTS, execute("--defragmentation", "schedule"));
-
-        String grid0ConsId = grid(0).configuration().getConsistentId().toString();
-        String grid1ConsId = grid(1).configuration().getConsistentId().toString();
-
-        ListeningTestLogger testLog = new ListeningTestLogger();
-
-        CommandHandler cmd = createCommandHandler(testLog);
-
-        LogListener logLsnr = LogListener.matches("Scheduling completed successfully.").build();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            grid0ConsId
-        ));
-
-        assertTrue(logLsnr.check());
-
-        MaintenanceTask mntcTask = DefragmentationParameters.toStore(Collections.emptyList());
-
-        assertNotNull(grid(0).context().maintenanceRegistry().registerMaintenanceTask(mntcTask));
-        assertNull(grid(1).context().maintenanceRegistry().registerMaintenanceTask(mntcTask));
-
-        stopGrid(0);
-        startGrid(0);
-
-        logLsnr = LogListener.matches("Node is already in Maintenance Mode").build();
-
-        testLog.clearListeners();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            grid0ConsId
-        ));
-
-        assertTrue(logLsnr.check());
-
-        stopGrid(0);
-        startGrid(0);
-
-        stopGrid(1);
-        startGrid(1);
-
-        stopAllGrids();
-
-        startGrids(2);
-
-        logLsnr = LogListener.matches("Scheduling completed successfully.").times(2).build();
-
-        testLog.clearListeners();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            String.join(",", grid0ConsId, grid1ConsId)
-        ));
-
-        assertTrue(logLsnr.check());
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDefragmentationCancel() throws Exception {
-        Ignite ignite = startGrids(2);
-
-        ignite.cluster().state(ACTIVE);
-
-        String grid0ConsId = grid(0).configuration().getConsistentId().toString();
-
-        ListeningTestLogger testLog = new ListeningTestLogger();
-
-        CommandHandler cmd = createCommandHandler(testLog);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            grid0ConsId
-        ));
-
-        LogListener logLsnr = LogListener.matches("Scheduled defragmentation task cancelled successfully.").atLeast(1).build();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            grid(0).localNode().attribute(IgniteNodeAttributes.ATTR_REST_TCP_PORT).toString(),
-            "--defragmentation",
-            "cancel"
-        ));
-
-        assertTrue(logLsnr.check());
-
-        testLog.clearListeners();
-
-        logLsnr = LogListener.matches("Scheduled defragmentation task is not found.").build();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            grid(1).localNode().attribute(IgniteNodeAttributes.ATTR_REST_TCP_PORT).toString(),
-            "--defragmentation",
-            "cancel"
-        ));
-
-        assertTrue(logLsnr.check());
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDefragmentationCancelInProgress() throws Exception {
-        IgniteEx ig = startGrid(0);
-
-        ig.cluster().state(ClusterState.ACTIVE);
-
-        IgniteCache<Object, Object> cache = ig.getOrCreateCache(DEFAULT_CACHE_NAME);
-
-        for (int i = 0; i < 1024; i++)
-            cache.put(i, i);
-
-        forceCheckpoint(ig);
-
-        String grid0ConsId = ig.configuration().getConsistentId().toString();
-
-        ListeningTestLogger testLog = new ListeningTestLogger();
-
-        CommandHandler cmd = createCommandHandler(testLog);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            grid0ConsId
-        ));
-
-        String port = grid(0).localNode().attribute(IgniteNodeAttributes.ATTR_REST_TCP_PORT).toString();
-
-        stopGrid(0);
-
-        blockCdl = new CountDownLatch(128);
-
-        UnaryOperator<IgniteConfiguration> cfgOp = cfg -> {
-            DataStorageConfiguration dsCfg = cfg.getDataStorageConfiguration();
-
-            FileIOFactory delegate = dsCfg.getFileIOFactory();
-
-            dsCfg.setFileIOFactory((file, modes) -> {
-                if (file.getName().contains("dfrg")) {
-                    if (blockCdl.getCount() == 0) {
-                        try {
-                            // Slow down defragmentation process.
-                            // This'll be enough for the test since we have, like, 900 partitions left.
-                            Thread.sleep(100);
-                        }
-                        catch (InterruptedException ignore) {
-                            // No-op.
-                        }
-                    }
-                    else
-                        blockCdl.countDown();
-                }
-
-                return delegate.create(file, modes);
-            });
-
-            return cfg;
-        };
-
-        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(() -> {
-            try {
-                startGrid(0, cfgOp);
-            }
-            catch (Exception e) {
-                // No-op.
-                throw new RuntimeException(e);
-            }
-        });
-
-        blockCdl.await();
-
-        LogListener logLsnr = LogListener.matches("Defragmentation cancelled successfully.").build();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            port,
-            "--defragmentation",
-            "cancel"
-        ));
-
-        assertTrue(logLsnr.check());
-
-        fut.get();
-
-        testLog.clearListeners();
-
-        logLsnr = LogListener.matches("Defragmentation is already completed or has been cancelled previously.").build();
-
-        testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            port,
-            "--defragmentation",
-            "cancel"
-        ));
-
-        assertTrue(logLsnr.check());
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testDefragmentationStatus() throws Exception {
-        IgniteEx ig = startGrid(0);
-
-        ig.cluster().state(ClusterState.ACTIVE);
-
-        ig.getOrCreateCache(DEFAULT_CACHE_NAME + "1");
-
-        IgniteCache<Object, Object> cache = ig.getOrCreateCache(DEFAULT_CACHE_NAME + "2");
-
-        ig.getOrCreateCache(DEFAULT_CACHE_NAME + "3");
-
-        for (int i = 0; i < 1024; i++)
-            cache.put(i, i);
-
-        forceCheckpoint(ig);
-
-        String grid0ConsId = ig.configuration().getConsistentId().toString();
-
-        ListeningTestLogger testLog = new ListeningTestLogger();
-
-        CommandHandler cmd = createCommandHandler(testLog);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--defragmentation",
-            "schedule",
-            "--nodes",
-            grid0ConsId
-        ));
-
-        String port = grid(0).localNode().attribute(IgniteNodeAttributes.ATTR_REST_TCP_PORT).toString();
-
-        stopGrid(0);
-
-        blockCdl = new CountDownLatch(128);
-        waitCdl = new CountDownLatch(1);
-
-        UnaryOperator<IgniteConfiguration> cfgOp = cfg -> {
-            DataStorageConfiguration dsCfg = cfg.getDataStorageConfiguration();
-
-            FileIOFactory delegate = dsCfg.getFileIOFactory();
-
-            dsCfg.setFileIOFactory((file, modes) -> {
-                if (file.getName().contains("dfrg")) {
-                    if (blockCdl.getCount() == 0) {
-                        try {
-                            waitCdl.await();
-                        }
-                        catch (InterruptedException ignore) {
-                            // No-op.
-                        }
-                    }
-                    else
-                        blockCdl.countDown();
-                }
-
-                return delegate.create(file, modes);
-            });
-
-            return cfg;
-        };
-
-        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(() -> {
-            try {
-                startGrid(0, cfgOp);
-            }
-            catch (Exception e) {
-                // No-op.
-                throw new RuntimeException(e);
-            }
-        });
-
-        blockCdl.await();
-
-        List<LogListener> logLsnrs = Arrays.asList(
-            LogListener.matches("default1 - size before/after: 0MB/0MB").build(),
-            LogListener.matches("default2 - partitions processed/all:").build(),
-            LogListener.matches("Awaiting defragmentation: default3").build()
-        );
-
-        for (LogListener logLsnr : logLsnrs)
-            testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            port,
-            "--defragmentation",
-            "status"
-        ));
-
-        waitCdl.countDown();
-
-        for (LogListener logLsnr : logLsnrs)
-            assertTrue(logLsnr.check());
-
-        fut.get();
-
-        ((GridCacheDatabaseSharedManager)grid(0).context().cache().context().database())
-            .defragmentationManager()
-            .completionFuture()
-            .get();
-
-        testLog.clearListeners();
-
-        logLsnrs = Arrays.asList(
-            LogListener.matches("default1 - size before/after: 0MB/0MB").build(),
-            LogListener.matches(Pattern.compile("default2 - size before/after: (\\S+)/\\1")).build(),
-            LogListener.matches("default3 - size before/after: 0MB/0MB").build()
-        );
-
-        for (LogListener logLsnr : logLsnrs)
-            testLog.registerListener(logLsnr);
-
-        assertEquals(EXIT_CODE_OK, execute(
-            cmd,
-            "--port",
-            port,
-            "--defragmentation",
-            "status"
-        ));
-
-        for (LogListener logLsnr : logLsnrs)
-            assertTrue(logLsnr.check());
-    }
-
-    /** */
-    private CommandHandler createCommandHandler(ListeningTestLogger testLog) {
-        Logger log = CommandHandler.initLogger(null);
-
-        log.addHandler(new StreamHandler(System.out, new Formatter() {
-            /** {@inheritDoc} */
-            @Override public String format(LogRecord record) {
-                String msg = record.getMessage();
-
-                testLog.info(msg);
-
-                return msg + "\n";
-            }
-        }));
-
-        return new CommandHandler(log);
-    }
-}
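
The removed defragmentation test above drives the command handler directly; reconstructed from its execute(...) calls, the equivalent CLI invocations look roughly like the lines below. consistentId1, consistentId2 and node-rest-port are placeholders; --port targets the REST port of the specific node whose scheduled task should be inspected or cancelled:

    control.sh --defragmentation schedule --nodes consistentId1,consistentId2
    control.sh --port node-rest-port --defragmentation status
    control.sh --port node-rest-port --defragmentation cancel
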
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexingClusterByClassTest.java b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexingClusterByClassTest.java
index fc5a465..7d889c7 100644
--- a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexingClusterByClassTest.java
+++ b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexingClusterByClassTest.java
@@ -17,8 +17,6 @@
 
 package org.apache.ignite.util;
 
-import org.apache.ignite.failure.FailureHandler;
-import org.apache.ignite.failure.StopNodeFailureHandler;
 import org.junit.Test;
 
 import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_OK;
@@ -44,11 +42,6 @@ public class GridCommandHandlerIndexingClusterByClassTest extends GridCommandHan
         createAndFillCache(client, CACHE_NAME, GROUP_NAME);
     }
 
-    /** {@inheritDoc} */
-    @Override protected FailureHandler getFailureHandler(String igniteInstanceName) {
-        return new StopNodeFailureHandler();
-    }
-
     /**
      * Tests that --cache check_index_inline_sizes works when all indexes have the same inline size.
      */
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerInterruptCommandTest.java b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerInterruptCommandTest.java
deleted file mode 100644
index d750439..0000000
--- a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerInterruptCommandTest.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.util;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.ignite.IgniteDataStreamer;
-import org.apache.ignite.IgniteException;
-import org.apache.ignite.cache.QueryEntity;
-import org.apache.ignite.cache.QueryIndex;
-import org.apache.ignite.cache.QueryIndexType;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.configuration.DataRegionConfiguration;
-import org.apache.ignite.configuration.DataStorageConfiguration;
-import org.apache.ignite.configuration.IgniteConfiguration;
-import org.apache.ignite.events.DeploymentEvent;
-import org.apache.ignite.events.EventType;
-import org.apache.ignite.internal.IgniteEx;
-import org.apache.ignite.internal.IgniteInternalFuture;
-import org.apache.ignite.internal.util.typedef.internal.S;
-import org.apache.ignite.internal.visor.verify.ValidateIndexesClosure;
-import org.apache.ignite.testframework.GridTestUtils;
-import org.apache.ignite.testframework.ListeningTestLogger;
-import org.apache.ignite.testframework.LogListener;
-import org.junit.Test;
-
-import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_UNEXPECTED_ERROR;
-
-/**
- * Checks that execution of the validate_indexes command can be cancelled.
- */
-public class GridCommandHandlerInterruptCommandTest extends GridCommandHandlerAbstractTest {
-    /** Load loop cycles. */
-    private static final int LOAD_LOOP = 500_000;
-
-    /** Idle verify task name. */
-    private static final String IDLE_VERIFY_TASK_V2 = "org.apache.ignite.internal.visor.verify.VisorIdleVerifyTaskV2";
-
-    /** Validate index task name. */
-    private static final String VALIDATE_INDEX_TASK = "org.apache.ignite.internal.visor.verify.VisorValidateIndexesTask";
-
-    /** Log listener. */
-    private ListeningTestLogger lnsrLog;
-
-    /** {@inheritDoc} */
-    @Override protected void beforeTest() throws Exception {
-        super.beforeTest();
-
-        cleanPersistenceDir();
-    }
-
-    /** {@inheritDoc} */
-    @Override protected void afterTest() throws Exception {
-        stopAllGrids();
-
-        cleanPersistenceDir();
-
-        super.afterTest();
-    }
-
-    /** {@inheritDoc} */
-    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
-        return super.getConfiguration(igniteInstanceName)
-            .setGridLogger(lnsrLog)
-            .setDataStorageConfiguration(new DataStorageConfiguration()
-                .setDefaultDataRegionConfiguration(new DataRegionConfiguration()
-                    .setPersistenceEnabled(true)
-                    .setInitialSize(200L * 1024 * 1024)
-                    .setMaxSize(200L * 1024 * 1024)
-                )
-            )
-            .setIncludeEventTypes(EventType.EVT_TASK_DEPLOYED)
-            .setCacheConfiguration(new CacheConfiguration<Integer, UserValue>(DEFAULT_CACHE_NAME)
-                .setName(DEFAULT_CACHE_NAME)
-                .setQueryEntities(Collections.singleton(createQueryEntity())));
-    }
-
-    /**
-     * Creates a predefined query entity.
-     *
-     * @return Query entity.
-     */
-    private QueryEntity createQueryEntity() {
-        QueryEntity qryEntity = new QueryEntity();
-        qryEntity.setKeyType(Integer.class.getTypeName());
-        qryEntity.setValueType(UserValue.class.getName());
-        qryEntity.setTableName("USER_TEST_TABLE");
-
-        LinkedHashMap<String, String> fields = new LinkedHashMap<>();
-        fields.put("x", "java.lang.Integer");
-        fields.put("y", "java.lang.Integer");
-        fields.put("z", "java.lang.Integer");
-        qryEntity.setFields(fields);
-
-        LinkedHashMap<String, Boolean> idxFields = new LinkedHashMap<>();
-
-        QueryIndex idx2 = new QueryIndex();
-        idx2.setName("IDX_2");
-        idx2.setIndexType(QueryIndexType.SORTED);
-        idxFields = new LinkedHashMap<>();
-        idxFields.put("x", false);
-        idx2.setFields(idxFields);
-
-        QueryIndex idx3 = new QueryIndex();
-        idx3.setName("IDX_3");
-        idx3.setIndexType(QueryIndexType.SORTED);
-        idxFields = new LinkedHashMap<>();
-        idxFields.put("y", false);
-        idx3.setFields(idxFields);
-
-        QueryIndex idx4 = new QueryIndex();
-        idx4.setName("IDX_4");
-        idx4.setIndexType(QueryIndexType.SORTED);
-        idxFields = new LinkedHashMap<>();
-        idxFields.put("z", false);
-        idx4.setFields(idxFields);
-
-        qryEntity.setIndexes(Arrays.asList(idx2, idx3, idx4));
-        return qryEntity;
-    }
-
-    /**
-     * User value.
-     */
-    private static class UserValue {
-        /** X. */
-        private int x;
-
-        /** Y. */
-        private int y;
-
-        /** Z. */
-        private int z;
-
-        /**
-         * @param x X.
-         * @param y Y.
-         * @param z Z.
-         */
-        public UserValue(int x, int y, int z) {
-            this.x = x;
-            this.y = y;
-            this.z = z;
-        }
-
-        /**
-         * @param seed Seed.
-         */
-        public UserValue(long seed) {
-            x = (int)(seed % 6991);
-            y = (int)(seed % 18679);
-            z = (int)(seed % 31721);
-        }
-
-        /** {@inheritDoc} */
-        @Override public String toString() {
-            return S.toString(UserValue.class, this);
-        }
-    }
-
-    /**
-     * Checks that the validate_indexes command is cancelled after it is interrupted.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testValidateIndexesCommand() throws Exception {
-        lnsrLog = new ListeningTestLogger(false, log);
-
-        IgniteEx ignite = startGrid(0);
-
-        ignite.cluster().active(true);
-
-        preloadeData(ignite);
-
-        CountDownLatch startTaskLatch = waitForTaskEvent(ignite, VALIDATE_INDEX_TASK);
-
-        LogListener lnsrValidationCancelled = LogListener.matches("Index validation was cancelled.").build();
-
-        lnsrLog.registerListener(lnsrValidationCancelled);
-
-        IgniteInternalFuture fut = GridTestUtils.runAsync(() ->
-            assertSame(EXIT_CODE_UNEXPECTED_ERROR, execute("--cache", "validate_indexes")));
-
-        startTaskLatch.await();
-
-        fut.cancel();
-
-        fut.get();
-
-        assertTrue(GridTestUtils.waitForCondition(() ->
-            ignite.compute().activeTaskFutures().isEmpty(), 10_000));
-
-        assertTrue(GridTestUtils.waitForCondition(lnsrValidationCancelled::check, 10_000));
-    }
-
-    /**
-     * Checks that the idle_verify command is not cancelled if the initiator client is interrupted.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testIdleVerifyCommand() throws Exception {
-        lnsrLog = new ListeningTestLogger(false, log);
-
-        IgniteEx ignite = startGrid(0);
-
-        ignite.cluster().active(true);
-
-        preloadeData(ignite);
-
-        CountDownLatch startTaskLatch = waitForTaskEvent(ignite, IDLE_VERIFY_TASK_V2);
-
-        LogListener lnsrValidationCancelled = LogListener.matches("Idle verify was cancelled.").build();
-
-        lnsrLog.registerListener(lnsrValidationCancelled);
-
-        IgniteInternalFuture fut = GridTestUtils.runAsync(() ->
-            assertSame(EXIT_CODE_UNEXPECTED_ERROR, execute("--cache", "idle_verify")));
-
-        startTaskLatch.await();
-
-        fut.cancel();
-
-        fut.get();
-
-        assertTrue(GridTestUtils.waitForCondition(() ->
-            ignite.compute().activeTaskFutures().isEmpty(), 30_000));
-
-        assertFalse(lnsrValidationCancelled.check());
-    }
-
-    /**
-     * Subscribes to the task deployment event and returns a latch for waiting.
-     *
-     * @param ignite Ignite.
-     * @param taskName Task name.
-     * @return Latch which opens after the event is received.
-     */
-    private CountDownLatch waitForTaskEvent(IgniteEx ignite, String taskName) {
-        CountDownLatch startTaskLatch = new CountDownLatch(1);
-
-        ignite.events().localListen((evt) -> {
-            assertTrue(evt instanceof DeploymentEvent);
-
-            if (taskName.equals(((DeploymentEvent)evt).alias())) {
-                startTaskLatch.countDown();
-
-                return false;
-            }
-
-            return true;
-        }, EventType.EVT_TASK_DEPLOYED);
-        return startTaskLatch;
-    }
-
-    /**
-     * Preloads data into the default cache.
-     *
-     * @param ignite Ignite.
-     */
-    private void preloadeData(IgniteEx ignite) {
-        try (IgniteDataStreamer streamr = ignite.dataStreamer(DEFAULT_CACHE_NAME)) {
-            for (int i = 0; i < LOAD_LOOP; i++)
-                streamr.addData(i, new UserValue(i));
-        }
-    }
-
-    /**
-     * Test invokes the index validation closure and cancels it after it has started.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testCancelValidateIndexesClosure() throws Exception {
-        IgniteEx ignite0 = startGrid(0);
-
-        ignite0.cluster().active(true);
-
-        preloadeData(ignite0);
-
-        AtomicBoolean cancelled = new AtomicBoolean(false);
-
-        ValidateIndexesClosure clo = new ValidateIndexesClosure(cancelled::get, Collections.singleton(DEFAULT_CACHE_NAME),
-            0, 0, false, true);
-
-        ListeningTestLogger listeningLogger = new ListeningTestLogger(false, log);
-
-        GridTestUtils.setFieldValue(clo, "ignite", ignite0);
-        GridTestUtils.setFieldValue(clo, "log", listeningLogger);
-
-        LogListener lnsrValidationStarted = LogListener.matches("Current progress of ValidateIndexesClosure").build();
-
-        listeningLogger.registerListener(lnsrValidationStarted);
-
-        IgniteInternalFuture fut = GridTestUtils.runAsync(() ->
-            GridTestUtils.assertThrows(log, clo::call, IgniteException.class, ValidateIndexesClosure.CANCELLED_MSG));
-
-        assertTrue(GridTestUtils.waitForCondition(lnsrValidationStarted::check, 10_000));
-
-        assertFalse(fut.isDone());
-
-        cancelled.set(true);
-
-        fut.get(10_000);
-    }
-}
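Note (not part of this commit): the cancellation tests removed above drive control.sh commands through the test harness's execute(...) helper. Outside the test framework the same commands can be issued programmatically through CommandHandler; the sketch below assumes a node is already running and reachable on the default connector port, and the class name is chosen only for illustration.

    import java.util.Arrays;

    import org.apache.ignite.internal.commandline.CommandHandler;

    import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_OK;

    /** Illustrative only: issues the same command the removed tests cancel mid-flight. */
    public class ControlUtilitySketch {
        public static void main(String[] args) {
            CommandHandler hnd = new CommandHandler();

            // Same invocation as execute("--cache", "validate_indexes") in the removed tests.
            int code = hnd.execute(Arrays.asList("--cache", "validate_indexes"));

            if (code != EXIT_CODE_OK)
                System.err.println("validate_indexes finished with exit code " + code);
        }
    }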
diff --git a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java
index 4a2bfe1..5557b5e 100644
--- a/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java
+++ b/modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java
@@ -27,7 +27,6 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.BitSet;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -45,7 +44,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
 import java.util.function.Function;
-import java.util.function.UnaryOperator;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -102,23 +100,18 @@ import org.apache.ignite.internal.processors.cache.warmup.BlockedWarmUpConfigura
 import org.apache.ignite.internal.processors.cache.warmup.BlockedWarmUpStrategy;
 import org.apache.ignite.internal.processors.cache.warmup.WarmUpTestPluginProvider;
 import org.apache.ignite.internal.processors.cluster.GridClusterStateProcessor;
-import org.apache.ignite.internal.util.future.IgniteFinishedFutureImpl;
 import org.apache.ignite.internal.util.lang.GridAbsPredicate;
-import org.apache.ignite.internal.util.lang.GridFunc;
 import org.apache.ignite.internal.util.typedef.G;
 import org.apache.ignite.internal.util.typedef.X;
-import org.apache.ignite.internal.util.typedef.internal.CU;
 import org.apache.ignite.internal.util.typedef.internal.U;
 import org.apache.ignite.internal.visor.cache.VisorFindAndDeleteGarbageInPersistenceTaskResult;
 import org.apache.ignite.internal.visor.tx.VisorTxInfo;
 import org.apache.ignite.internal.visor.tx.VisorTxTaskResult;
 import org.apache.ignite.lang.IgniteBiPredicate;
-import org.apache.ignite.lang.IgniteFuture;
 import org.apache.ignite.lang.IgniteInClosure;
 import org.apache.ignite.lang.IgnitePredicate;
 import org.apache.ignite.lang.IgniteUuid;
 import org.apache.ignite.plugin.extensions.communication.Message;
-import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
 import org.apache.ignite.spi.metric.LongMetric;
 import org.apache.ignite.testframework.GridTestUtils;
 import org.apache.ignite.testframework.junits.WithSystemProperty;
@@ -146,12 +139,6 @@ import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_IN
 import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_OK;
 import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_UNEXPECTED_ERROR;
 import static org.apache.ignite.internal.commandline.CommandList.DEACTIVATE;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.CACHE_GROUP_KEY_IDS;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.CHANGE_CACHE_GROUP_KEY;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.REENCRYPTION_RATE;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.REENCRYPTION_RESUME;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.REENCRYPTION_STATUS;
-import static org.apache.ignite.internal.commandline.encryption.EncryptionSubcommands.REENCRYPTION_SUSPEND;
 import static org.apache.ignite.internal.encryption.AbstractEncryptionTest.MASTER_KEY_NAME_2;
 import static org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager.IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP;
 import static org.apache.ignite.internal.processors.cache.persistence.snapshot.AbstractSnapshotSelfTest.doSnapshotCancellationTest;
@@ -1168,148 +1155,6 @@ public class GridCommandHandlerTest extends GridCommandHandlerClusterPerMethodAb
     }
 
     /**
-     * Test connectivity command works via control.sh.
-     */
-    @Test
-    public void testConnectivityCommandWithoutFailedNodes() throws Exception {
-        IgniteEx ignite = startGrids(5);
-
-        assertFalse(ignite.cluster().state().active());
-
-        ignite.cluster().state(ACTIVE);
-
-        injectTestSystemOut();
-
-        assertEquals(EXIT_CODE_OK, execute("--diagnostic", "connectivity"));
-
-        assertContains(log, testOut.toString(), "There are no connectivity problems.");
-    }
-
-    /**
-     * Test that if node exits topology during connectivity check, the command will not fail.
-     *
-     * Description:
-     * 1. Start three nodes.
-     * 2. Execute connectivity check.
-     * 3. When 3-rd node receives connectivity check compute task, it must stop itself.
-     * 4. The command should exit with code OK.
-     *
-     * @throws Exception If failed.
-     */
-    @Test
-    public void testConnectivityCommandWithNodeExit() throws Exception {
-        IgniteEx[] node3 = new IgniteEx[1];
-
-        class KillNode3CommunicationSpi extends TcpCommunicationSpi {
-            /** Whether to fail the check connection request and stop the third node. */
-            boolean fail;
-
-            public KillNode3CommunicationSpi(boolean fail) {
-                this.fail = fail;
-            }
-
-            /** {@inheritDoc} */
-            @Override public IgniteFuture<BitSet> checkConnection(List<ClusterNode> nodes) {
-                if (fail) {
-                    runAsync(node3[0]::close);
-                    return null;
-                }
-
-                return super.checkConnection(nodes);
-            }
-        }
-
-        IgniteEx node1 = startGrid(1, (UnaryOperator<IgniteConfiguration>) configuration -> {
-            configuration.setCommunicationSpi(new KillNode3CommunicationSpi(false));
-            return configuration;
-        });
-
-        IgniteEx node2 = startGrid(2, (UnaryOperator<IgniteConfiguration>) configuration -> {
-            configuration.setCommunicationSpi(new KillNode3CommunicationSpi(false));
-            return configuration;
-        });
-
-        node3[0] = startGrid(3, (UnaryOperator<IgniteConfiguration>) configuration -> {
-            configuration.setCommunicationSpi(new KillNode3CommunicationSpi(true));
-            return configuration;
-        });
-
-        assertFalse(node1.cluster().state().active());
-
-        node1.cluster().state(ACTIVE);
-
-        assertEquals(3, node1.cluster().nodes().size());
-
-        injectTestSystemOut();
-
-        final IgniteInternalFuture<?> connectivity = runAsync(() -> {
-            final int result = execute("--diagnostic", "connectivity");
-            assertEquals(EXIT_CODE_OK, result);
-        });
-
-        connectivity.get();
-    }
-
-    /**
-     * Test connectivity command works via control.sh with one node failing.
-     */
-    @Test
-    public void testConnectivityCommandWithFailedNodes() throws Exception {
-        UUID okId = UUID.randomUUID();
-        UUID failingId = UUID.randomUUID();
-
-        UnaryOperator<IgniteConfiguration> operator = configuration -> {
-            configuration.setCommunicationSpi(new TcpCommunicationSpi() {
-                /** {@inheritDoc} */
-                @Override public IgniteFuture<BitSet> checkConnection(List<ClusterNode> nodes) {
-                    BitSet bitSet = new BitSet();
-
-                    int idx = 0;
-
-                    for (ClusterNode remoteNode : nodes) {
-                        if (!remoteNode.id().equals(failingId))
-                            bitSet.set(idx);
-
-                        idx++;
-                    }
-
-                    return new IgniteFinishedFutureImpl<>(bitSet);
-                }
-            });
-            return configuration;
-        };
-
-        IgniteEx ignite = startGrid("normal", configuration -> {
-            operator.apply(configuration);
-            configuration.setConsistentId(okId);
-            configuration.setNodeId(okId);
-            return configuration;
-        });
-
-        IgniteEx failure = startGrid("failure", configuration -> {
-            operator.apply(configuration);
-            configuration.setConsistentId(failingId);
-            configuration.setNodeId(failingId);
-            return configuration;
-        });
-
-        ignite.cluster().state(ACTIVE);
-
-        failure.cluster().state(ACTIVE);
-
-        injectTestSystemOut();
-
-        int connectivity = execute("--diagnostic", "connectivity");
-        assertEquals(EXIT_CODE_OK, connectivity);
-
-        String out = testOut.toString();
-        String what = "There is no connectivity between the following nodes";
-
-        assertContains(log, out.replaceAll("[\\W_]+", "").trim(),
-                            what.replaceAll("[\\W_]+", "").trim());
-    }
-
-    /**
      * Test baseline remove works via control.sh
      *
      * @throws Exception If failed.
@@ -2381,10 +2226,7 @@ public class GridCommandHandlerTest extends GridCommandHandlerClusterPerMethodAb
      */
     @Test
     public void testDiagnosticPageLocksTracker() throws Exception {
-        Ignite ignite = startGrid(0, (UnaryOperator<IgniteConfiguration>)cfg -> cfg.setConsistentId("node0/dump"));
-        startGrid(1, (UnaryOperator<IgniteConfiguration>)cfg -> cfg.setConsistentId("node1/dump"));
-        startGrid(2, (UnaryOperator<IgniteConfiguration>)cfg -> cfg.setConsistentId("node2/dump"));
-        startGrid(3, (UnaryOperator<IgniteConfiguration>)cfg -> cfg.setConsistentId("node3/dump"));
+        Ignite ignite = startGrids(4);
 
         Collection<ClusterNode> nodes = ignite.cluster().nodes();
 
@@ -2784,7 +2626,7 @@ public class GridCommandHandlerTest extends GridCommandHandlerClusterPerMethodAb
     /** @throws Exception If failed. */
     @Test
     public void testMasterKeyChange() throws Exception {
-        encryptionEnabled = true;
+        encriptionEnabled = true;
 
         injectTestSystemOut();
 
@@ -2821,187 +2663,8 @@ public class GridCommandHandlerTest extends GridCommandHandlerClusterPerMethodAb
 
     /** @throws Exception If failed. */
     @Test
-    public void testCacheGroupKeyChange() throws Exception {
-        encryptionEnabled = true;
-
-        injectTestSystemOut();
-
-        int srvNodes = 2;
-
-        IgniteEx ignite = startGrids(srvNodes);
-
-        startGrid(CLIENT_NODE_NAME_PREFIX);
-        startGrid(DAEMON_NODE_NAME_PREFIX);
-
-        ignite.cluster().state(ACTIVE);
-
-        List<Ignite> srvGrids = GridFunc.asList(grid(0), grid(1));
-
-        enableCheckpoints(srvGrids, false);
-
-        createCacheAndPreload(ignite, 1000);
-
-        int ret = execute("--encryption", CACHE_GROUP_KEY_IDS.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertContains(log, testOut.toString(), "Encryption key identifiers for cache: " + DEFAULT_CACHE_NAME);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(), "0 (active)"));
-
-        ret = execute("--encryption", CHANGE_CACHE_GROUP_KEY.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertContains(log, testOut.toString(),
-            "The encryption key has been changed for the cache group \"" + DEFAULT_CACHE_NAME + '"');
-
-        ret = execute("--encryption", CACHE_GROUP_KEY_IDS.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(testOut.toString(), EXIT_CODE_OK, ret);
-        assertContains(log, testOut.toString(), "Encryption key identifiers for cache: " + DEFAULT_CACHE_NAME);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(), "1 (active)"));
-
-        GridTestUtils.waitForCondition(() -> {
-            execute("--encryption", REENCRYPTION_STATUS.toString(), DEFAULT_CACHE_NAME);
-
-            return srvNodes == countSubstrs(testOut.toString(),
-                "re-encryption will be completed after the next checkpoint");
-        }, getTestTimeout());
-
-        enableCheckpoints(srvGrids, true);
-        forceCheckpoint(srvGrids);
-
-        GridTestUtils.waitForCondition(() -> {
-            execute("--encryption", REENCRYPTION_STATUS.toString(), DEFAULT_CACHE_NAME);
-
-            return srvNodes == countSubstrs(testOut.toString(), "re-encryption completed or not required");
-        }, getTestTimeout());
-    }
-
-    /** @throws Exception If failed. */
-    @Test
-    public void testChangeReencryptionRate() throws Exception {
-        int srvNodes = 2;
-
-        IgniteEx ignite = startGrids(srvNodes);
-
-        ignite.cluster().state(ACTIVE);
-
-        injectTestSystemOut();
-
-        int ret = execute("--encryption", REENCRYPTION_RATE.toString());
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(), "re-encryption rate is not limited."));
-
-        double newRate = 0.01;
-
-        ret = execute("--encryption", REENCRYPTION_RATE.toString(), Double.toString(newRate));
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            String.format("re-encryption rate has been limited to %.2f MB/s.", newRate)));
-
-        ret = execute("--encryption", REENCRYPTION_RATE.toString());
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            String.format("re-encryption rate is limited to %.2f MB/s.", newRate)));
-
-        ret = execute("--encryption", REENCRYPTION_RATE.toString(), "0");
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(), "re-encryption rate is not limited."));
-    }
-
-    /** @throws Exception If failed. */
-    @Test
-    public void testReencryptionSuspendAndResume() throws Exception {
-        encryptionEnabled = true;
-        reencryptSpeed = 0.01;
-        reencryptBatchSize = 1;
-
-        int srvNodes = 2;
-
-        IgniteEx ignite = startGrids(srvNodes);
-
-        ignite.cluster().state(ACTIVE);
-
-        injectTestSystemOut();
-
-        createCacheAndPreload(ignite, 10_000);
-
-        ignite.encryption().changeCacheGroupKey(Collections.singleton(DEFAULT_CACHE_NAME)).get();
-
-        assertTrue(isReencryptionStarted(DEFAULT_CACHE_NAME));
-
-        int ret = execute("--encryption", REENCRYPTION_STATUS.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-
-        Pattern ptrn = Pattern.compile("(?m)Node [-0-9a-f]{36}:\n\\s+(?<left>\\d+) KB of data.+");
-        Matcher matcher = ptrn.matcher(testOut.toString());
-        int matchesCnt = 0;
-
-        while (matcher.find()) {
-            assertEquals(1, matcher.groupCount());
-
-            int pagesLeft = Integer.parseInt(matcher.group("left"));
-
-            assertTrue(pagesLeft > 0);
-
-            matchesCnt++;
-        }
-
-        assertEquals(srvNodes, matchesCnt);
-
-        ret = execute("--encryption", REENCRYPTION_SUSPEND.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            "re-encryption of the cache group \"" + DEFAULT_CACHE_NAME + "\" has been suspended."));
-        assertFalse(isReencryptionStarted(DEFAULT_CACHE_NAME));
-
-        ret = execute("--encryption", REENCRYPTION_SUSPEND.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            "re-encryption of the cache group \"" + DEFAULT_CACHE_NAME + "\" has already been suspended."));
-
-        ret = execute("--encryption", REENCRYPTION_RESUME.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            "re-encryption of the cache group \"" + DEFAULT_CACHE_NAME + "\" has been resumed."));
-        assertTrue(isReencryptionStarted(DEFAULT_CACHE_NAME));
-
-        ret = execute("--encryption", REENCRYPTION_RESUME.toString(), DEFAULT_CACHE_NAME);
-
-        assertEquals(EXIT_CODE_OK, ret);
-        assertEquals(srvNodes, countSubstrs(testOut.toString(),
-            "re-encryption of the cache group \"" + DEFAULT_CACHE_NAME + "\" has already been resumed."));
-    }
-
-    /**
-     * @param cacheName Cache name.
-     * @return {@code True} if re-encryption of the specified cache is started on all server nodes.
-     */
-    private boolean isReencryptionStarted(String cacheName) {
-        for (Ignite grid : G.allGrids()) {
-            ClusterNode locNode = grid.cluster().localNode();
-
-            if (locNode.isClient() || locNode.isDaemon())
-                continue;
-
-            if (((IgniteEx)grid).context().encryption().reencryptionFuture(CU.cacheId(cacheName)).isDone())
-                return false;
-        }
-
-        return true;
-    }
-
-    /** @throws Exception If failed. */
-    @Test
     public void testMasterKeyChangeOnInactiveCluster() throws Exception {
-        encryptionEnabled = true;
+        encriptionEnabled = true;
 
         injectTestSystemOut();
 
@@ -3171,18 +2834,4 @@ public class GridCommandHandlerTest extends GridCommandHandlerClusterPerMethodAb
 
         return hnd.getLastOperationResult();
     }
-
-    /**
-     * @param str String.
-     * @param substr Substring to find in the specified string.
-     * @return The number of substrings found in the specified string.
-     */
-    private int countSubstrs(String str, String substr) {
-        int cnt = 0;
-
-        for (int off = 0; (off = str.indexOf(substr, off)) != -1; off++)
-            ++cnt;
-
-        return cnt;
-    }
 }
diff --git a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java
index 94f1536..148e86d 100644
--- a/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java
+++ b/modules/core/src/main/java/org/apache/ignite/IgniteSystemProperties.java
@@ -36,7 +36,6 @@ import org.apache.ignite.internal.client.GridClient;
 import org.apache.ignite.internal.marshaller.optimized.OptimizedMarshaller;
 import org.apache.ignite.internal.processors.metastorage.DistributedMetaStorage;
 import org.apache.ignite.internal.processors.metric.GridMetricManager;
-import org.apache.ignite.internal.processors.performancestatistics.FilePerformanceStatisticsWriter;
 import org.apache.ignite.internal.processors.rest.GridRestCommand;
 import org.apache.ignite.internal.util.GridLogThrottle;
 import org.apache.ignite.lang.IgniteExperimental;
@@ -82,7 +81,6 @@ import static org.apache.ignite.internal.processors.cache.distributed.dht.preloa
 import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition.DFLT_ATOMIC_CACHE_DELETE_HISTORY_SIZE;
 import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition.DFLT_CACHE_REMOVE_ENTRIES_TTL;
 import static org.apache.ignite.internal.processors.cache.mvcc.MvccCachingManager.DFLT_MVCC_TX_SIZE_CACHING_THRESHOLD;
-import static org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager.DFLT_DEFRAGMENTATION_REGION_SIZE_PERCENTAGE;
 import static org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager.DFLT_PDS_WAL_REBALANCE_THRESHOLD;
 import static org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointHistory.DFLT_PDS_MAX_CHECKPOINT_MEMORY_HISTORY_SIZE;
 import static org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointWorkflow.DFLT_CHECKPOINT_PARALLEL_SORT_THRESHOLD;
@@ -119,10 +117,6 @@ import static org.apache.ignite.internal.processors.failure.FailureProcessor.DFL
 import static org.apache.ignite.internal.processors.job.GridJobProcessor.DFLT_JOBS_HISTORY_SIZE;
 import static org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor.DFLT_JOBS_METRICS_CONCURRENCY_LEVEL;
 import static org.apache.ignite.internal.processors.metastorage.persistence.DistributedMetaStorageImpl.DFLT_MAX_HISTORY_BYTES;
-import static org.apache.ignite.internal.processors.performancestatistics.FilePerformanceStatisticsWriter.DFLT_BUFFER_SIZE;
-import static org.apache.ignite.internal.processors.performancestatistics.FilePerformanceStatisticsWriter.DFLT_CACHED_STRINGS_THRESHOLD;
-import static org.apache.ignite.internal.processors.performancestatistics.FilePerformanceStatisticsWriter.DFLT_FILE_MAX_SIZE;
-import static org.apache.ignite.internal.processors.performancestatistics.FilePerformanceStatisticsWriter.DFLT_FLUSH_SIZE;
 import static org.apache.ignite.internal.processors.query.QueryUtils.DFLT_INDEXING_DISCOVERY_HISTORY_SIZE;
 import static org.apache.ignite.internal.processors.rest.GridRestProcessor.DFLT_SES_TIMEOUT;
 import static org.apache.ignite.internal.processors.rest.GridRestProcessor.DFLT_SES_TOKEN_INVALIDATE_INTERVAL;
@@ -677,10 +671,7 @@ public final class IgniteSystemProperties {
 
     /**
      * Flag indicating whether validation of keys put to cache should be disabled.
-     *
-     * @deprecated Since 2.10 Obsolete because of common use of binary marshaller.
      */
-    @Deprecated
     @SystemProperty("Disables validation of keys put to cache")
     public static final String IGNITE_CACHE_KEY_VALIDATION_DISABLED = "IGNITE_CACHE_KEY_VALIDATION_DISABLED";
 
@@ -1958,51 +1949,6 @@ public final class IgniteSystemProperties {
     public static final String IGNITE_TEST_ENV = "IGNITE_TEST_ENV";
 
     /**
-     * Defragmentation region size percentage of configured region size.
-     * This percentage will be calculated from largest configured region size and then proportionally subtracted
-     * from all configured regions.
-     */
-    @SystemProperty(value = "Defragmentation region size percentage of configured region size. " +
-        "This percentage will be calculated from largest configured region size and then proportionally subtracted " +
-        "from all configured regions",
-        type = Integer.class,
-        defaults = "" + DFLT_DEFRAGMENTATION_REGION_SIZE_PERCENTAGE)
-    public static final String IGNITE_DEFRAGMENTATION_REGION_SIZE_PERCENTAGE =
-        "IGNITE_DEFRAGMENTATION_REGION_SIZE_PERCENTAGE";
-
-    /**
-     * Performance statistics maximum file size in bytes. Performance statistics will be stopped when the size is exceeded.
-     * The default value is {@link FilePerformanceStatisticsWriter#DFLT_FILE_MAX_SIZE}.
-     */
-    @SystemProperty(value = "Performance statistics maximum file size in bytes. Performance statistics will be " +
-        "stopped when the size exceeded", type = Long.class, defaults = "" + DFLT_FILE_MAX_SIZE)
-    public static final String IGNITE_PERF_STAT_FILE_MAX_SIZE = "IGNITE_PERF_STAT_FILE_MAX_SIZE";
-
-    /**
-     * Performance statistics off heap buffer size in bytes. The default value is
-     * {@link FilePerformanceStatisticsWriter#DFLT_BUFFER_SIZE}.
-     */
-    @SystemProperty(value = "Performance statistics off heap buffer size in bytes", type = Integer.class,
-        defaults = "" + DFLT_BUFFER_SIZE)
-    public static final String IGNITE_PERF_STAT_BUFFER_SIZE = "IGNITE_PERF_STAT_BUFFER_SIZE";
-
-    /**
-     * Performance statistics minimal batch size to flush in bytes. The default value is
-     * {@link FilePerformanceStatisticsWriter#DFLT_FLUSH_SIZE}.
-     */
-    @SystemProperty(value = "Performance statistics minimal batch size to flush in bytes", type = Integer.class,
-        defaults = "" + DFLT_FLUSH_SIZE)
-    public static final String IGNITE_PERF_STAT_FLUSH_SIZE = "IGNITE_PERF_STAT_FLUSH_SIZE";
-
-    /**
-     * Performance statistics maximum cached strings threshold. String caching stops when the threshold is exceeded.
-     * The default value is {@link FilePerformanceStatisticsWriter#DFLT_CACHED_STRINGS_THRESHOLD}.
-     */
-    @SystemProperty(value = "Performance statistics maximum cached strings threshold. String caching will stop on " +
-        "threshold excess", type = Integer.class, defaults = "" + DFLT_CACHED_STRINGS_THRESHOLD)
-    public static final String IGNITE_PERF_STAT_CACHED_STRINGS_THRESHOLD = "IGNITE_PERF_STAT_CACHED_STRINGS_THRESHOLD";
-
-    /**
      * Enforces singleton.
      */
     private IgniteSystemProperties() {
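Note (not part of this commit): the performance statistics and defragmentation properties deleted above are ordinary JVM system properties read at node startup. A rough sketch of how such a property is set, only meaningful on builds that still ship the reverted feature; the property name is quoted as a plain string because the constant no longer exists after this revert.

    import org.apache.ignite.Ignition;

    /** Illustrative only: sets a performance statistics property before node startup. */
    public class PerfStatPropertySketch {
        public static void main(String[] args) {
            // Equivalent to -DIGNITE_PERF_STAT_FILE_MAX_SIZE=67108864 on the JVM command line.
            System.setProperty("IGNITE_PERF_STAT_FILE_MAX_SIZE", String.valueOf(64L * 1024 * 1024));

            Ignition.start();
        }
    }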
diff --git a/modules/core/src/main/java/org/apache/ignite/cache/affinity/rendezvous/ClusterNodeAttributeAffinityBackupFilter.java b/modules/core/src/main/java/org/apache/ignite/cache/affinity/rendezvous/ClusterNodeAttributeAffinityBackupFilter.java
index 7a94d7a..592ca53 100644
--- a/modules/core/src/main/java/org/apache/ignite/cache/affinity/rendezvous/ClusterNodeAttributeAffinityBackupFilter.java
+++ b/modules/core/src/main/java/org/apache/ignite/cache/affinity/rendezvous/ClusterNodeAttributeAffinityBackupFilter.java
@@ -24,10 +24,10 @@ import org.apache.ignite.internal.util.typedef.internal.A;
 import org.apache.ignite.lang.IgniteBiPredicate;
 
 /**
- * Attribute-based affinity backup filter that forces each partition's primary and backup nodes to different hardware
- * which is not expected to fail simultaneously, e.g., in AWS, to different "availability zones". This
+ * This class can be used as a {@link RendezvousAffinityFunction#affinityBackupFilter } to create
+ * cache templates in Spring that force each partition's primary and backup to different hardware which
+ * is not expected to fail simultaneously, e.g., in AWS, to different "availability zones".  This
  * is a per-partition selection, and different partitions may choose different primaries.
- * See {@link RendezvousAffinityFunction#setAffinityBackupFilter}.
  * <p>
  * This implementation will discard backups rather than place multiple on the same set of nodes. This avoids
  * trying to cram more data onto remaining nodes  when some have failed.
@@ -91,7 +91,7 @@ public class ClusterNodeAttributeAffinityBackupFilter implements IgniteBiPredica
     public ClusterNodeAttributeAffinityBackupFilter(String... attributeNames) {
         A.ensure(attributeNames.length > 0, "attributeNames.length > 0");
 
-        this.attributeNames = attributeNames.clone();
+        this.attributeNames = attributeNames;
     }
 
     /**
@@ -128,12 +128,4 @@ public class ClusterNodeAttributeAffinityBackupFilter implements IgniteBiPredica
         return true;
     }
 
-    /**
-     * Gets attribute names.
-     *
-     * @return Attribute names.
-     */
-    public String[] getAttributeNames() {
-        return attributeNames.clone();
-    }
 }
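Note (not part of this commit): for readers following the javadoc change above, wiring the backup filter into a cache looks roughly like the sketch below. The AVAILABILITY_ZONE attribute name and the cache name are placeholders; each node is expected to advertise the attribute via its user attributes.

    import java.util.Collections;

    import org.apache.ignite.Ignition;
    import org.apache.ignite.cache.affinity.rendezvous.ClusterNodeAttributeAffinityBackupFilter;
    import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
    import org.apache.ignite.configuration.CacheConfiguration;
    import org.apache.ignite.configuration.IgniteConfiguration;

    /** Illustrative only: spreads backups across availability zones with the attribute-based filter. */
    public class BackupFilterSketch {
        public static void main(String[] args) {
            RendezvousAffinityFunction aff = new RendezvousAffinityFunction();

            // Each node advertises its zone via a user attribute with the same name.
            aff.setAffinityBackupFilter(new ClusterNodeAttributeAffinityBackupFilter("AVAILABILITY_ZONE"));

            CacheConfiguration<Integer, String> ccfg = new CacheConfiguration<Integer, String>("zoned")
                .setBackups(1)
                .setAffinity(aff);

            IgniteConfiguration cfg = new IgniteConfiguration()
                .setUserAttributes(Collections.singletonMap("AVAILABILITY_ZONE", "eu-west-1a"))
                .setCacheConfiguration(ccfg);

            Ignition.start(cfg);
        }
    }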
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/DataStorageConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/DataStorageConfiguration.java
index 6f5b338..2a1927b 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/DataStorageConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/DataStorageConfiguration.java
@@ -72,9 +72,6 @@ public class DataStorageConfiguration implements Serializable {
     /** */
     private static final long serialVersionUID = 0L;
 
-    /** Value used for making WAL archive size unlimited */
-    public static final long UNLIMITED_WAL_ARCHIVE = -1;
-
     /** Default data region start size (256 MB). */
     public static final long DFLT_DATA_REGION_INITIAL_SIZE = 256L * 1024 * 1024;
 
@@ -179,9 +176,6 @@ public class DataStorageConfiguration implements Serializable {
     /** Default wal compaction level. */
     public static final int DFLT_WAL_COMPACTION_LEVEL = Deflater.BEST_SPEED;
 
-    /** Default defragmentation thread pool size. */
-    public static final int DFLT_DEFRAGMENTATION_THREAD_POOL_SIZE = 4;
-
     /** Default compression algorithm for WAL page snapshot records. */
     public static final DiskPageCompression DFLT_WAL_PAGE_COMPRESSION = DiskPageCompression.DISABLED;
 
@@ -323,9 +317,6 @@ public class DataStorageConfiguration implements Serializable {
     /** Encryption configuration. */
     private EncryptionConfiguration encCfg = new EncryptionConfiguration();
 
-    /** Maximum number of partitions which can be defragmented at the same time. */
-    private int defragmentationThreadPoolSize = DFLT_DEFRAGMENTATION_THREAD_POOL_SIZE;
-
     /**
      * Creates valid durable memory configuration with all default values.
      */
@@ -603,26 +594,21 @@ public class DataStorageConfiguration implements Serializable {
     /**
      * Gets a max allowed size(in bytes) of WAL archives.
      *
-     * @return max size(in bytes) of WAL archive directory(greater than 0, or {@link #UNLIMITED_WAL_ARCHIVE} if
-     * WAL archive size is unlimited).
+     * @return max size(in bytes) of WAL archive directory(always greater than 0).
      */
     public long getMaxWalArchiveSize() {
-        return maxWalArchiveSize;
+        return maxWalArchiveSize <= 0 ? DFLT_WAL_ARCHIVE_MAX_SIZE : maxWalArchiveSize;
     }
 
     /**
      * Sets a max allowed size(in bytes) of WAL archives.
      *
-     * If value is not positive or {@link #UNLIMITED_WAL_ARCHIVE}, {@link #DFLT_WAL_ARCHIVE_MAX_SIZE} will be used.
+     * If value is not positive, {@link #DFLT_WAL_ARCHIVE_MAX_SIZE} will be used.
      *
      * @param walArchiveMaxSize max size(in bytes) of WAL archive directory.
      * @return {@code this} for chaining.
      */
     public DataStorageConfiguration setMaxWalArchiveSize(long walArchiveMaxSize) {
-        if (walArchiveMaxSize != UNLIMITED_WAL_ARCHIVE)
-            A.ensure(walArchiveMaxSize > 0, "Max WAL archive size can be only greater than 0 " +
-                "or must be equal to " + UNLIMITED_WAL_ARCHIVE + " (to be unlimited)");
-
         this.maxWalArchiveSize = walArchiveMaxSize;
 
         return this;
@@ -1179,30 +1165,6 @@ public class DataStorageConfiguration implements Serializable {
         return dfltWarmUpCfg;
     }
 
-    /**
-     * Sets maximum number of partitions which can be defragmented at the same time.
-     *
-     * @param defragmentationThreadPoolSize Maximum number of partitions which can be defragmented at the same time.
-     *      Default is {@link DataStorageConfiguration#DFLT_DEFRAGMENTATION_THREAD_POOL_SIZE}.
-     * @return {@code this} for chaining.
-     */
-    public DataStorageConfiguration setDefragmentationThreadPoolSize(int defragmentationThreadPoolSize) {
-        A.ensure(defragmentationThreadPoolSize >= 1, "Defragmentation thread pool size must be greater than or equal to 1.");
-
-        this.defragmentationThreadPoolSize = defragmentationThreadPoolSize;
-
-        return this;
-    }
-
-    /**
-     * Maximum number of partitions which can be defragmented at the same time.
-     *
-     * @return Thread pool size for defragmentation.
-     */
-    public int getDefragmentationThreadPoolSize() {
-        return defragmentationThreadPoolSize;
-    }
-
     /** {@inheritDoc} */
     @Override public String toString() {
         return S.toString(DataStorageConfiguration.class, this);
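Note (not part of this commit): the WAL archive hunk above changes how non-positive values of maxWalArchiveSize are treated (the UNLIMITED_WAL_ARCHIVE marker is removed and the getter again falls back to DFLT_WAL_ARCHIVE_MAX_SIZE). A minimal configuration sketch, with sizes chosen only for illustration:

    import org.apache.ignite.Ignition;
    import org.apache.ignite.configuration.DataRegionConfiguration;
    import org.apache.ignite.configuration.DataStorageConfiguration;
    import org.apache.ignite.configuration.IgniteConfiguration;

    /** Illustrative only: caps the WAL archive for a persistent data region. */
    public class WalArchiveSizeSketch {
        public static void main(String[] args) {
            DataStorageConfiguration dsCfg = new DataStorageConfiguration()
                .setDefaultDataRegionConfiguration(new DataRegionConfiguration()
                    .setPersistenceEnabled(true))
                // 2 GB cap; with the reverted getter, non-positive values fall back to
                // DFLT_WAL_ARCHIVE_MAX_SIZE rather than being rejected or meaning "unlimited".
                .setMaxWalArchiveSize(2L * 1024 * 1024 * 1024);

            Ignition.start(new IgniteConfiguration().setDataStorageConfiguration(dsCfg));
        }
    }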
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/EncryptionConfiguration.java b/modules/core/src/main/java/org/apache/ignite/configuration/EncryptionConfiguration.java
index 6b9345d..79e205e 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/EncryptionConfiguration.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/EncryptionConfiguration.java
@@ -75,7 +75,7 @@ public class EncryptionConfiguration implements Serializable {
      */
     public EncryptionConfiguration setReencryptionRateLimit(double reencryptionRateLimit) {
         A.ensure(reencryptionRateLimit >= 0,
-            "Re-encryption rate limit (" + reencryptionRateLimit + ") must be non-negative.");
+            "Reencryption rate limit (" + reencryptionRateLimit + ") must be non-negative.");
 
         this.reencryptionRateLimit = reencryptionRateLimit;
 
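Note (not part of this commit): the message tweak above touches setReencryptionRateLimit, which takes a limit in MB/s and accepts 0 for "not limited". A minimal usage sketch, assuming the usual fluent attachment of the encryption settings to DataStorageConfiguration:

    import org.apache.ignite.configuration.DataStorageConfiguration;
    import org.apache.ignite.configuration.EncryptionConfiguration;

    /** Illustrative only: limits background re-encryption to 1 MB/s. */
    public class ReencryptionRateSketch {
        public static void main(String[] args) {
            EncryptionConfiguration encCfg = new EncryptionConfiguration()
                // 0 (the default) means the rate is not limited.
                .setReencryptionRateLimit(1.0);

            DataStorageConfiguration dsCfg = new DataStorageConfiguration()
                .setEncryptionConfiguration(encCfg);

            System.out.println(dsCfg);
        }
    }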
diff --git a/modules/core/src/main/java/org/apache/ignite/events/EventType.java b/modules/core/src/main/java/org/apache/ignite/events/EventType.java
index 6fb7056..3da9801 100644
--- a/modules/core/src/main/java/org/apache/ignite/events/EventType.java
+++ b/modules/core/src/main/java/org/apache/ignite/events/EventType.java
@@ -925,11 +925,6 @@ public interface EventType {
 
     /**
      * Built-in event type: query execution.
-     * This event is triggered after a corresponding SQL query is validated and before it is executed.
-     * Unlike {@link #EVT_CACHE_QUERY_EXECUTED}, {@code EVT_SQL_QUERY_EXECUTION} is fired only once per request
-     * and does not relate to a specific cache.
-     * The event includes the following information: query text and its arguments, security subject id.
-     *
      * <p>
      * NOTE: all types in range <b>from 1 to 1000 are reserved</b> for
      * internal Ignite events and should not be used by user-defined events.
diff --git a/modules/core/src/main/java/org/apache/ignite/events/SqlQueryExecutionEvent.java b/modules/core/src/main/java/org/apache/ignite/events/SqlQueryExecutionEvent.java
index d8feb07..4700d7b 100644
--- a/modules/core/src/main/java/org/apache/ignite/events/SqlQueryExecutionEvent.java
+++ b/modules/core/src/main/java/org/apache/ignite/events/SqlQueryExecutionEvent.java
@@ -28,10 +28,6 @@ import static org.apache.ignite.events.EventType.EVT_SQL_QUERY_EXECUTION;
 
 /**
  * Query execution event.
- * This event is triggered after a corresponding SQL query is validated and before it is executed.
- * Unlike {@link EventType#EVT_CACHE_QUERY_EXECUTED}, {@link EventType#EVT_SQL_QUERY_EXECUTION} is fired only once per request
- * and does not relate to a specific cache.
- *
  * <p>
  * Grid events are used for notification about what happens within the grid. Note that by
  * design Ignite keeps all events generated on the local node locally and it provides
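Note (not part of this commit): since the hunks above trim the javadoc describing when EVT_SQL_QUERY_EXECUTION fires, a short listener sketch may help. The text() accessor is assumed here to return the query string, and the event type has to be enabled explicitly, like any other Ignite event.

    import org.apache.ignite.Ignite;
    import org.apache.ignite.Ignition;
    import org.apache.ignite.configuration.IgniteConfiguration;
    import org.apache.ignite.events.EventType;
    import org.apache.ignite.events.SqlQueryExecutionEvent;

    /** Illustrative only: listens for SQL query execution events on the local node. */
    public class SqlQueryEventSketch {
        public static void main(String[] args) {
            // The event type must be enabled explicitly in the node configuration.
            IgniteConfiguration cfg = new IgniteConfiguration()
                .setIncludeEventTypes(EventType.EVT_SQL_QUERY_EXECUTION);

            Ignite ignite = Ignition.start(cfg);

            ignite.events().localListen(evt -> {
                SqlQueryExecutionEvent e = (SqlQueryExecutionEvent)evt;

                System.out.println("SQL executed: " + e.text());

                return true; // Keep listening.
            }, EventType.EVT_SQL_QUERY_EXECUTION);
        }
    }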
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridComponent.java b/modules/core/src/main/java/org/apache/ignite/internal/GridComponent.java
index 60198749d..067f79b 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridComponent.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridComponent.java
@@ -76,10 +76,7 @@ public interface GridComponent {
         SERVICE_PROC,
 
         /** Distributed MetaStorage processor. */
-        META_STORAGE,
-
-        /** Performance statistics processor. */
-        PERFORMANCE_STAT_PROC;
+        META_STORAGE;
 
         /** Cached values array. */
         public static final DiscoveryDataExchangeType[] VALUES = values();
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
index 2e2fae6..56f9765 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
@@ -21,6 +21,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.ExecutorService;
+
 import org.apache.ignite.IgniteLogger;
 import org.apache.ignite.configuration.IgniteConfiguration;
 import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManager;
@@ -38,7 +39,6 @@ import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor;
 import org.apache.ignite.internal.processors.authentication.IgniteAuthenticationProcessor;
 import org.apache.ignite.internal.processors.cache.GridCacheProcessor;
 import org.apache.ignite.internal.processors.cache.mvcc.MvccProcessor;
-import org.apache.ignite.internal.processors.cache.persistence.defragmentation.IgniteDefragmentation;
 import org.apache.ignite.internal.processors.cache.persistence.filename.PdsFoldersResolver;
 import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor;
 import org.apache.ignite.internal.processors.closure.GridClosureProcessor;
@@ -58,7 +58,6 @@ import org.apache.ignite.internal.processors.marshaller.GridMarshallerMappingPro
 import org.apache.ignite.internal.processors.metastorage.DistributedMetaStorage;
 import org.apache.ignite.internal.processors.metric.GridMetricManager;
 import org.apache.ignite.internal.processors.odbc.ClientListenerProcessor;
-import org.apache.ignite.internal.processors.performancestatistics.PerformanceStatisticsProcessor;
 import org.apache.ignite.internal.processors.platform.PlatformProcessor;
 import org.apache.ignite.internal.processors.plugin.IgnitePluginProcessor;
 import org.apache.ignite.internal.processors.pool.PoolProcessor;
@@ -454,13 +453,6 @@ public interface GridKernalContext extends Iterable<GridComponent> {
     public GridEncryptionManager encryption();
 
     /**
-     * Gets defragmentation manager.
-     *
-     * @return Defragmentation manager.
-     */
-    public IgniteDefragmentation defragmentation();
-
-    /**
      * Gets workers registry.
      *
      * @return Workers registry.
@@ -769,11 +761,4 @@ public interface GridKernalContext extends Iterable<GridComponent> {
      * @return Thread pool for create/rebuild indexes.
      */
     public ExecutorService buildIndexExecutorService();
-
-    /**
-     * Gets Performance statistics processor.
-     *
-     * @return Performance statistics processor.
-     */
-    public PerformanceStatisticsProcessor performanceStatistics();
 }
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
index a9f80be..ae589ad 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
@@ -31,6 +31,7 @@ import java.util.List;
 import java.util.Map;
... 42645 lines suppressed ...