Posted to commits@ignite.apache.org by sb...@apache.org on 2016/09/26 11:25:27 UTC

[46/47] ignite git commit: IGNITE-3912: Hadoop: Implemented new class loading architecture for embedded execution mode.

IGNITE-3912: Hadoop: Implemented new class loading architecture for embedded execution mode.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/8032fc2c
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/8032fc2c
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/8032fc2c

Branch: refs/heads/ignite-3967
Commit: 8032fc2c8a7cf9f404eb75c65164bb2900aab79d
Parents: 548fe6a
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Mon Sep 26 12:31:30 2016 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Mon Sep 26 12:31:34 2016 +0300

----------------------------------------------------------------------
 bin/include/setenv.bat                          |     8 -
 bin/include/setenv.sh                           |    31 -
 .../ignite/internal/GridKernalContext.java      |     8 +
 .../ignite/internal/GridKernalContextImpl.java  |    12 +
 .../ignite/internal/IgniteComponentType.java    |    11 +-
 .../apache/ignite/internal/IgniteKernal.java    |     8 +-
 .../processors/hadoop/HadoopClassLoader.java    |   487 +
 .../processors/hadoop/HadoopClasspathUtils.java |   424 +
 .../processors/hadoop/HadoopDefaultJobInfo.java |   156 +
 .../processors/hadoop/HadoopHelper.java         |    55 +
 .../processors/hadoop/HadoopJobInfo.java        |     4 +-
 .../processors/hadoop/HadoopLocations.java      |   123 +
 .../processors/hadoop/HadoopNoopHelper.java     |    66 +
 .../processors/hadoop/HadoopNoopProcessor.java  |     4 +-
 .../internal/processors/igfs/IgfsImpl.java      |     5 +-
 .../processors/igfs/IgfsKernalContextAware.java |    32 +
 .../hadoop/fs/BasicHadoopFileSystemFactory.java |   125 +-
 .../fs/CachingHadoopFileSystemFactory.java      |    54 +-
 .../hadoop/fs/HadoopFileSystemFactory.java      |    11 +-
 .../fs/IgniteHadoopFileSystemCounterWriter.java |    79 +-
 .../fs/IgniteHadoopIgfsSecondaryFileSystem.java |   402 +-
 .../fs/KerberosHadoopFileSystemFactory.java     |    77 +-
 .../hadoop/fs/v1/IgniteHadoopFileSystem.java    |    47 +-
 .../hadoop/fs/v2/IgniteHadoopFileSystem.java    |    47 +-
 .../IgniteHadoopClientProtocolProvider.java     |    11 +-
 .../mapreduce/IgniteHadoopMapReducePlanner.java |    22 +-
 .../IgniteHadoopWeightedMapReducePlanner.java   |     4 +-
 .../ignite/hadoop/util/UserNameMapper.java      |     4 +-
 .../processors/hadoop/HadoopClassLoader.java    |   964 --
 .../processors/hadoop/HadoopClasspathMain.java  |    44 -
 .../processors/hadoop/HadoopClasspathUtils.java |   461 -
 .../processors/hadoop/HadoopCommonUtils.java    |   154 +
 .../processors/hadoop/HadoopContext.java        |     1 -
 .../processors/hadoop/HadoopDefaultJobInfo.java |   156 -
 .../processors/hadoop/HadoopExternalSplit.java  |    88 +
 .../processors/hadoop/HadoopHelperImpl.java     |   120 +
 .../processors/hadoop/HadoopLocations.java      |   123 -
 .../hadoop/HadoopMapReduceCounterGroup.java     |   123 -
 .../hadoop/HadoopMapReduceCounters.java         |   228 -
 .../processors/hadoop/HadoopProcessor.java      |    32 +-
 .../processors/hadoop/HadoopSplitWrapper.java   |   119 +
 .../internal/processors/hadoop/HadoopUtils.java |   443 -
 .../hadoop/counter/HadoopCounterAdapter.java    |     1 +
 .../counter/HadoopPerformanceCounter.java       |    12 +-
 .../hadoop/delegate/HadoopDelegateUtils.java    |   138 +
 .../HadoopFileSystemCounterWriterDelegate.java  |    36 +
 .../HadoopFileSystemFactoryDelegate.java        |    36 +
 .../HadoopIgfsSecondaryFileSystemDelegate.java  |    28 +
 .../hadoop/fs/HadoopFileSystemCacheUtils.java   |   242 -
 .../hadoop/fs/HadoopFileSystemsUtils.java       |    51 -
 .../hadoop/fs/HadoopLazyConcurrentMap.java      |   212 -
 .../hadoop/fs/HadoopLocalFileSystemV1.java      |    39 -
 .../hadoop/fs/HadoopLocalFileSystemV2.java      |    88 -
 .../processors/hadoop/fs/HadoopParameters.java  |    94 -
 .../hadoop/fs/HadoopRawLocalFileSystem.java     |   314 -
 .../processors/hadoop/igfs/HadoopIgfs.java      |   202 -
 .../igfs/HadoopIgfsCommunicationException.java  |    57 -
 .../processors/hadoop/igfs/HadoopIgfsEx.java    |    93 -
 .../hadoop/igfs/HadoopIgfsFuture.java           |    97 -
 .../hadoop/igfs/HadoopIgfsInProc.java           |   510 -
 .../hadoop/igfs/HadoopIgfsInputStream.java      |   629 -
 .../processors/hadoop/igfs/HadoopIgfsIo.java    |    76 -
 .../processors/hadoop/igfs/HadoopIgfsIpcIo.java |   624 -
 .../hadoop/igfs/HadoopIgfsIpcIoListener.java    |    36 -
 .../hadoop/igfs/HadoopIgfsJclLogger.java        |   116 -
 .../hadoop/igfs/HadoopIgfsOutProc.java          |   524 -
 .../hadoop/igfs/HadoopIgfsOutputStream.java     |   201 -
 .../hadoop/igfs/HadoopIgfsProperties.java       |    86 -
 .../hadoop/igfs/HadoopIgfsProxyInputStream.java |   337 -
 .../igfs/HadoopIgfsProxyOutputStream.java       |   165 -
 ...fsSecondaryFileSystemPositionedReadable.java |   105 -
 .../hadoop/igfs/HadoopIgfsStreamDelegate.java   |    96 -
 .../igfs/HadoopIgfsStreamEventListener.java     |    39 -
 .../processors/hadoop/igfs/HadoopIgfsUtils.java |   174 -
 .../hadoop/igfs/HadoopIgfsWrapper.java          |   552 -
 .../impl/HadoopMapReduceCounterGroup.java       |   124 +
 .../hadoop/impl/HadoopMapReduceCounters.java    |   229 +
 .../processors/hadoop/impl/HadoopUtils.java     |   328 +
 .../HadoopBasicFileSystemFactoryDelegate.java   |   164 +
 .../HadoopCachingFileSystemFactoryDelegate.java |    75 +
 .../HadoopDefaultFileSystemFactoryDelegate.java |    62 +
 ...doopFileSystemCounterWriterDelegateImpl.java |   108 +
 ...doopIgfsSecondaryFileSystemDelegateImpl.java |   472 +
 ...HadoopKerberosFileSystemFactoryDelegate.java |   112 +
 .../impl/fs/HadoopFileSystemCacheUtils.java     |   243 +
 .../hadoop/impl/fs/HadoopFileSystemsUtils.java  |    51 +
 .../hadoop/impl/fs/HadoopLazyConcurrentMap.java |   210 +
 .../hadoop/impl/fs/HadoopLocalFileSystemV1.java |    40 +
 .../hadoop/impl/fs/HadoopLocalFileSystemV2.java |    89 +
 .../hadoop/impl/fs/HadoopParameters.java        |    94 +
 .../impl/fs/HadoopRawLocalFileSystem.java       |   315 +
 .../processors/hadoop/impl/igfs/HadoopIgfs.java |   203 +
 .../igfs/HadoopIgfsCommunicationException.java  |    57 +
 .../hadoop/impl/igfs/HadoopIgfsEx.java          |    94 +
 .../hadoop/impl/igfs/HadoopIgfsFuture.java      |    97 +
 .../hadoop/impl/igfs/HadoopIgfsInProc.java      |   511 +
 .../hadoop/impl/igfs/HadoopIgfsInputStream.java |   630 +
 .../hadoop/impl/igfs/HadoopIgfsIo.java          |    76 +
 .../hadoop/impl/igfs/HadoopIgfsIpcIo.java       |   625 +
 .../impl/igfs/HadoopIgfsIpcIoListener.java      |    36 +
 .../hadoop/impl/igfs/HadoopIgfsJclLogger.java   |   116 +
 .../hadoop/impl/igfs/HadoopIgfsOutProc.java     |   525 +
 .../impl/igfs/HadoopIgfsOutputStream.java       |   202 +
 .../hadoop/impl/igfs/HadoopIgfsProperties.java  |    87 +
 .../impl/igfs/HadoopIgfsProxyInputStream.java   |   338 +
 .../impl/igfs/HadoopIgfsProxyOutputStream.java  |   166 +
 ...fsSecondaryFileSystemPositionedReadable.java |   106 +
 .../impl/igfs/HadoopIgfsStreamDelegate.java     |    96 +
 .../igfs/HadoopIgfsStreamEventListener.java     |    39 +
 .../hadoop/impl/igfs/HadoopIgfsUtils.java       |   175 +
 .../hadoop/impl/igfs/HadoopIgfsWrapper.java     |   554 +
 .../hadoop/impl/proto/HadoopClientProtocol.java |   354 +
 .../hadoop/impl/v1/HadoopV1CleanupTask.java     |    65 +
 .../hadoop/impl/v1/HadoopV1Counter.java         |   107 +
 .../hadoop/impl/v1/HadoopV1MapTask.java         |   122 +
 .../hadoop/impl/v1/HadoopV1OutputCollector.java |   138 +
 .../hadoop/impl/v1/HadoopV1Partitioner.java     |    44 +
 .../hadoop/impl/v1/HadoopV1ReduceTask.java      |   101 +
 .../hadoop/impl/v1/HadoopV1Reporter.java        |    81 +
 .../hadoop/impl/v1/HadoopV1SetupTask.java       |    57 +
 .../hadoop/impl/v1/HadoopV1Splitter.java        |   103 +
 .../processors/hadoop/impl/v1/HadoopV1Task.java |    98 +
 .../processors/hadoop/impl/v2/HadoopDaemon.java |   126 +
 .../impl/v2/HadoopSerializationWrapper.java     |   139 +
 .../impl/v2/HadoopShutdownHookManager.java      |    98 +
 .../hadoop/impl/v2/HadoopV2CleanupTask.java     |    73 +
 .../hadoop/impl/v2/HadoopV2Context.java         |   244 +
 .../hadoop/impl/v2/HadoopV2Counter.java         |    89 +
 .../processors/hadoop/impl/v2/HadoopV2Job.java  |   452 +
 .../impl/v2/HadoopV2JobResourceManager.java     |   324 +
 .../hadoop/impl/v2/HadoopV2MapTask.java         |    99 +
 .../hadoop/impl/v2/HadoopV2Partitioner.java     |    44 +
 .../hadoop/impl/v2/HadoopV2ReduceTask.java      |    91 +
 .../hadoop/impl/v2/HadoopV2SetupTask.java       |    66 +
 .../hadoop/impl/v2/HadoopV2Splitter.java        |   112 +
 .../processors/hadoop/impl/v2/HadoopV2Task.java |   186 +
 .../hadoop/impl/v2/HadoopV2TaskContext.java     |   563 +
 .../impl/v2/HadoopWritableSerialization.java    |    76 +
 .../hadoop/jobtracker/HadoopJobTracker.java     |    23 +-
 .../planner/HadoopDefaultMapReducePlan.java     |     7 +-
 .../hadoop/proto/HadoopClientProtocol.java      |   349 -
 .../child/HadoopChildProcessRunner.java         |    16 +-
 .../hadoop/v1/HadoopV1CleanupTask.java          |    64 -
 .../processors/hadoop/v1/HadoopV1Counter.java   |   106 -
 .../processors/hadoop/v1/HadoopV1MapTask.java   |   122 -
 .../hadoop/v1/HadoopV1OutputCollector.java      |   137 -
 .../hadoop/v1/HadoopV1Partitioner.java          |    44 -
 .../hadoop/v1/HadoopV1ReduceTask.java           |   101 -
 .../processors/hadoop/v1/HadoopV1Reporter.java  |    81 -
 .../processors/hadoop/v1/HadoopV1SetupTask.java |    56 -
 .../processors/hadoop/v1/HadoopV1Splitter.java  |   102 -
 .../processors/hadoop/v1/HadoopV1Task.java      |    97 -
 .../processors/hadoop/v2/HadoopDaemon.java      |   126 -
 .../hadoop/v2/HadoopExternalSplit.java          |    89 -
 .../hadoop/v2/HadoopSerializationWrapper.java   |   138 -
 .../hadoop/v2/HadoopShutdownHookManager.java    |    98 -
 .../hadoop/v2/HadoopSplitWrapper.java           |   119 -
 .../hadoop/v2/HadoopV2CleanupTask.java          |    72 -
 .../processors/hadoop/v2/HadoopV2Context.java   |   243 -
 .../processors/hadoop/v2/HadoopV2Counter.java   |    88 -
 .../processors/hadoop/v2/HadoopV2Job.java       |   445 -
 .../hadoop/v2/HadoopV2JobResourceManager.java   |   323 -
 .../processors/hadoop/v2/HadoopV2MapTask.java   |    99 -
 .../hadoop/v2/HadoopV2Partitioner.java          |    44 -
 .../hadoop/v2/HadoopV2ReduceTask.java           |    91 -
 .../processors/hadoop/v2/HadoopV2SetupTask.java |    65 -
 .../processors/hadoop/v2/HadoopV2Splitter.java  |   111 -
 .../processors/hadoop/v2/HadoopV2Task.java      |   185 -
 .../hadoop/v2/HadoopV2TaskContext.java          |   560 -
 .../hadoop/v2/HadoopWritableSerialization.java  |    75 -
 .../HadoopClientProtocolEmbeddedSelfTest.java   |    35 -
 .../hadoop/HadoopClientProtocolSelfTest.java    |   654 -
 .../hadoop/cache/HadoopTxConfigCacheTest.java   |    42 -
 ...KerberosHadoopFileSystemFactorySelfTest.java |   121 -
 .../util/BasicUserNameMapperSelfTest.java       |   133 -
 .../util/ChainedUserNameMapperSelfTest.java     |   107 -
 .../util/KerberosUserNameMapperSelfTest.java    |    99 -
 .../ignite/igfs/Hadoop1DualAbstractTest.java    |   158 -
 .../igfs/Hadoop1OverIgfsDualAsyncTest.java      |    30 -
 .../igfs/Hadoop1OverIgfsDualSyncTest.java       |    30 -
 .../igfs/HadoopFIleSystemFactorySelfTest.java   |   317 -
 .../HadoopIgfs20FileSystemAbstractSelfTest.java |  2040 ---
 ...Igfs20FileSystemLoopbackPrimarySelfTest.java |    74 -
 ...oopIgfs20FileSystemShmemPrimarySelfTest.java |    74 -
 .../igfs/HadoopIgfsDualAbstractSelfTest.java    |   321 -
 .../igfs/HadoopIgfsDualAsyncSelfTest.java       |    32 -
 .../ignite/igfs/HadoopIgfsDualSyncSelfTest.java |    32 -
 ...adoopIgfsSecondaryFileSystemTestAdapter.java |   149 -
 ...oopSecondaryFileSystemConfigurationTest.java |   575 -
 .../apache/ignite/igfs/IgfsEventsTestSuite.java |   285 -
 .../igfs/IgfsNearOnlyMultiNodeSelfTest.java     |   223 -
 .../IgniteHadoopFileSystemAbstractSelfTest.java |  2432 ---
 .../IgniteHadoopFileSystemClientSelfTest.java   |   212 -
 ...IgniteHadoopFileSystemHandshakeSelfTest.java |   389 -
 .../IgniteHadoopFileSystemIpcCacheSelfTest.java |   214 -
 .../IgniteHadoopFileSystemLoggerSelfTest.java   |   298 -
 ...niteHadoopFileSystemLoggerStateSelfTest.java |   329 -
 ...adoopFileSystemLoopbackAbstractSelfTest.java |    46 -
 ...SystemLoopbackEmbeddedDualAsyncSelfTest.java |    33 -
 ...eSystemLoopbackEmbeddedDualSyncSelfTest.java |    33 -
 ...leSystemLoopbackEmbeddedPrimarySelfTest.java |    33 -
 ...SystemLoopbackEmbeddedSecondarySelfTest.java |    34 -
 ...SystemLoopbackExternalDualAsyncSelfTest.java |    33 -
 ...eSystemLoopbackExternalDualSyncSelfTest.java |    33 -
 ...leSystemLoopbackExternalPrimarySelfTest.java |    33 -
 ...SystemLoopbackExternalSecondarySelfTest.java |    34 -
 ...condaryFileSystemInitializationSelfTest.java |   214 -
 ...teHadoopFileSystemShmemAbstractSelfTest.java |    91 -
 ...ileSystemShmemEmbeddedDualAsyncSelfTest.java |    33 -
 ...FileSystemShmemEmbeddedDualSyncSelfTest.java |    33 -
 ...pFileSystemShmemEmbeddedPrimarySelfTest.java |    33 -
 ...ileSystemShmemEmbeddedSecondarySelfTest.java |    33 -
 ...ileSystemShmemExternalDualAsyncSelfTest.java |    33 -
 ...FileSystemShmemExternalDualSyncSelfTest.java |    33 -
 ...pFileSystemShmemExternalPrimarySelfTest.java |    33 -
 ...ileSystemShmemExternalSecondarySelfTest.java |    33 -
 .../hadoop/HadoopAbstractMapReduceTest.java     |   429 -
 .../hadoop/HadoopAbstractSelfTest.java          |   239 -
 .../hadoop/HadoopAbstractWordCountTest.java     |   175 -
 .../hadoop/HadoopClassLoaderTest.java           |   110 -
 .../hadoop/HadoopCommandLineTest.java           |   474 -
 .../HadoopDefaultMapReducePlannerSelfTest.java  |   615 -
 .../processors/hadoop/HadoopErrorSimulator.java |   326 -
 .../hadoop/HadoopFileSystemsTest.java           |   155 -
 .../processors/hadoop/HadoopGroupingTest.java   |   307 -
 .../hadoop/HadoopJobTrackerSelfTest.java        |   345 -
 .../hadoop/HadoopMapReduceEmbeddedSelfTest.java |   253 -
 .../HadoopMapReduceErrorResilienceTest.java     |   154 -
 .../processors/hadoop/HadoopMapReduceTest.java  |    66 -
 .../hadoop/HadoopNoHadoopMapReduceTest.java     |    47 -
 .../processors/hadoop/HadoopPlannerMockJob.java |   168 -
 .../hadoop/HadoopPopularWordsTest.java          |   298 -
 .../HadoopSerializationWrapperSelfTest.java     |    79 -
 .../processors/hadoop/HadoopSharedMap.java      |     1 +
 .../hadoop/HadoopSnappyFullMapReduceTest.java   |    36 -
 .../processors/hadoop/HadoopSnappyTest.java     |   102 -
 .../hadoop/HadoopSortingExternalTest.java       |    46 -
 .../processors/hadoop/HadoopSortingTest.java    |   303 -
 .../hadoop/HadoopSplitWrapperSelfTest.java      |    72 -
 .../processors/hadoop/HadoopStartup.java        |    54 -
 .../hadoop/HadoopTaskExecutionSelfTest.java     |   567 -
 .../hadoop/HadoopTasksAllVersionsTest.java      |   260 -
 .../processors/hadoop/HadoopTasksV1Test.java    |    58 -
 .../processors/hadoop/HadoopTasksV2Test.java    |    77 -
 .../hadoop/HadoopTestClassLoader.java           |   106 +
 .../hadoop/HadoopTestRoundRobinMrPlanner.java   |    71 -
 .../hadoop/HadoopTestTaskContext.java           |   228 -
 .../processors/hadoop/HadoopTestUtils.java      |   178 -
 .../hadoop/HadoopUserLibsSelfTest.java          |   260 -
 .../processors/hadoop/HadoopV2JobSelfTest.java  |   100 -
 .../hadoop/HadoopValidationSelfTest.java        |    53 -
 .../HadoopWeightedMapReducePlannerTest.java     |   599 -
 .../HadoopWeightedPlannerMapReduceTest.java     |    38 -
 .../hadoop/books/alice-in-wonderland.txt        |  3735 -----
 .../processors/hadoop/books/art-of-war.txt      |  6982 ---------
 .../hadoop/books/huckleberry-finn.txt           | 11733 ---------------
 .../processors/hadoop/books/sherlock-holmes.txt | 13052 -----------------
 .../processors/hadoop/books/tom-sawyer.txt      |  8858 -----------
 .../hadoop/deps/CircularWIthHadoop.java         |    32 -
 .../hadoop/deps/CircularWithoutHadoop.java      |    27 -
 .../processors/hadoop/deps/WithCast.java        |    41 -
 .../hadoop/deps/WithClassAnnotation.java        |    28 -
 .../hadoop/deps/WithConstructorInvocation.java  |    31 -
 .../processors/hadoop/deps/WithExtends.java     |    27 -
 .../processors/hadoop/deps/WithField.java       |    29 -
 .../processors/hadoop/deps/WithImplements.java  |    36 -
 .../hadoop/deps/WithIndirectField.java          |    27 -
 .../processors/hadoop/deps/WithInitializer.java |    33 -
 .../processors/hadoop/deps/WithInnerClass.java  |    31 -
 .../hadoop/deps/WithLocalVariable.java          |    38 -
 .../hadoop/deps/WithMethodAnnotation.java       |    32 -
 .../hadoop/deps/WithMethodArgument.java         |    31 -
 .../hadoop/deps/WithMethodCheckedException.java |    31 -
 .../hadoop/deps/WithMethodInvocation.java       |    31 -
 .../hadoop/deps/WithMethodReturnType.java       |    31 -
 .../hadoop/deps/WithMethodRuntimeException.java |    31 -
 .../processors/hadoop/deps/WithOuterClass.java  |    38 -
 .../hadoop/deps/WithParameterAnnotation.java    |    31 -
 .../processors/hadoop/deps/WithStaticField.java |    29 -
 .../hadoop/deps/WithStaticInitializer.java      |    34 -
 .../processors/hadoop/deps/Without.java         |    25 -
 .../hadoop/examples/HadoopWordCount1.java       |    94 -
 .../hadoop/examples/HadoopWordCount1Map.java    |    79 -
 .../hadoop/examples/HadoopWordCount1Reduce.java |    61 -
 .../hadoop/examples/HadoopWordCount2.java       |   111 -
 .../examples/HadoopWordCount2Combiner.java      |    45 -
 .../hadoop/examples/HadoopWordCount2Mapper.java |    88 -
 .../examples/HadoopWordCount2Reducer.java       |   113 -
 .../impl/HadoopAbstractMapReduceTest.java       |   430 +
 .../hadoop/impl/HadoopAbstractSelfTest.java     |   239 +
 .../impl/HadoopAbstractWordCountTest.java       |   175 +
 .../hadoop/impl/HadoopCommandLineTest.java      |   476 +
 .../HadoopDefaultMapReducePlannerSelfTest.java  |   619 +
 .../hadoop/impl/HadoopErrorSimulator.java       |   326 +
 .../hadoop/impl/HadoopFileSystemsTest.java      |   155 +
 .../hadoop/impl/HadoopGroupingTest.java         |   302 +
 .../hadoop/impl/HadoopJobTrackerSelfTest.java   |   334 +
 .../impl/HadoopMapReduceEmbeddedSelfTest.java   |   249 +
 .../HadoopMapReduceErrorResilienceTest.java     |   154 +
 .../hadoop/impl/HadoopMapReduceTest.java        |    66 +
 .../impl/HadoopNoHadoopMapReduceTest.java       |    47 +
 .../hadoop/impl/HadoopPlannerMockJob.java       |   175 +
 .../hadoop/impl/HadoopPopularWordsTest.java     |   298 +
 .../HadoopSerializationWrapperSelfTest.java     |    80 +
 .../impl/HadoopSnappyFullMapReduceTest.java     |    36 +
 .../hadoop/impl/HadoopSnappyTest.java           |   104 +
 .../hadoop/impl/HadoopSortingExternalTest.java  |    46 +
 .../hadoop/impl/HadoopSortingTest.java          |   304 +
 .../hadoop/impl/HadoopSplitWrapperSelfTest.java |    72 +
 .../processors/hadoop/impl/HadoopStartup.java   |    54 +
 .../impl/HadoopTaskExecutionSelfTest.java       |   550 +
 .../hadoop/impl/HadoopTasksAllVersionsTest.java |   264 +
 .../hadoop/impl/HadoopTasksV1Test.java          |    62 +
 .../hadoop/impl/HadoopTasksV2Test.java          |    81 +
 .../impl/HadoopTestRoundRobinMrPlanner.java     |    75 +
 .../hadoop/impl/HadoopTestTaskContext.java      |   233 +
 .../processors/hadoop/impl/HadoopTestUtils.java |   178 +
 .../hadoop/impl/HadoopTxConfigCacheTest.java    |    42 +
 .../hadoop/impl/HadoopUserLibsSelfTest.java     |   261 +
 .../hadoop/impl/HadoopV2JobSelfTest.java        |   108 +
 .../hadoop/impl/HadoopValidationSelfTest.java   |    53 +
 .../HadoopWeightedMapReducePlannerTest.java     |   602 +
 .../HadoopWeightedPlannerMapReduceTest.java     |    38 +
 .../hadoop/impl/books/alice-in-wonderland.txt   |  3735 +++++
 .../processors/hadoop/impl/books/art-of-war.txt |  6982 +++++++++
 .../hadoop/impl/books/huckleberry-finn.txt      | 11733 +++++++++++++++
 .../hadoop/impl/books/sherlock-holmes.txt       | 13052 +++++++++++++++++
 .../processors/hadoop/impl/books/tom-sawyer.txt |  8858 +++++++++++
 .../HadoopClientProtocolEmbeddedSelfTest.java   |    35 +
 .../client/HadoopClientProtocolSelfTest.java    |   654 +
 .../hadoop/impl/examples/HadoopWordCount1.java  |    94 +
 .../impl/examples/HadoopWordCount1Map.java      |    79 +
 .../impl/examples/HadoopWordCount1Reduce.java   |    61 +
 .../hadoop/impl/examples/HadoopWordCount2.java  |   111 +
 .../impl/examples/HadoopWordCount2Combiner.java |    45 +
 .../impl/examples/HadoopWordCount2Mapper.java   |    88 +
 .../impl/examples/HadoopWordCount2Reducer.java  |   113 +
 ...KerberosHadoopFileSystemFactorySelfTest.java |   126 +
 .../impl/igfs/Hadoop1DualAbstractTest.java      |   157 +
 .../impl/igfs/Hadoop1OverIgfsDualAsyncTest.java |    32 +
 .../impl/igfs/Hadoop1OverIgfsDualSyncTest.java  |    32 +
 .../igfs/HadoopFIleSystemFactorySelfTest.java   |   345 +
 .../HadoopIgfs20FileSystemAbstractSelfTest.java |  2047 +++
 ...Igfs20FileSystemLoopbackPrimarySelfTest.java |    77 +
 ...oopIgfs20FileSystemShmemPrimarySelfTest.java |    77 +
 .../igfs/HadoopIgfsDualAbstractSelfTest.java    |   328 +
 .../impl/igfs/HadoopIgfsDualAsyncSelfTest.java  |    32 +
 .../impl/igfs/HadoopIgfsDualSyncSelfTest.java   |    32 +
 ...adoopIgfsSecondaryFileSystemTestAdapter.java |   153 +
 ...oopSecondaryFileSystemConfigurationTest.java |   583 +
 .../hadoop/impl/igfs/IgfsEventsTestSuite.java   |   289 +
 .../igfs/IgfsNearOnlyMultiNodeSelfTest.java     |   226 +
 .../IgniteHadoopFileSystemAbstractSelfTest.java |  2435 +++
 .../IgniteHadoopFileSystemClientSelfTest.java   |   216 +
 ...IgniteHadoopFileSystemHandshakeSelfTest.java |   393 +
 .../IgniteHadoopFileSystemIpcCacheSelfTest.java |   215 +
 .../IgniteHadoopFileSystemLoggerSelfTest.java   |   299 +
 ...niteHadoopFileSystemLoggerStateSelfTest.java |   332 +
 ...adoopFileSystemLoopbackAbstractSelfTest.java |    50 +
 ...SystemLoopbackEmbeddedDualAsyncSelfTest.java |    33 +
 ...eSystemLoopbackEmbeddedDualSyncSelfTest.java |    33 +
 ...leSystemLoopbackEmbeddedPrimarySelfTest.java |    33 +
 ...SystemLoopbackEmbeddedSecondarySelfTest.java |    34 +
 ...SystemLoopbackExternalDualAsyncSelfTest.java |    33 +
 ...eSystemLoopbackExternalDualSyncSelfTest.java |    33 +
 ...leSystemLoopbackExternalPrimarySelfTest.java |    33 +
 ...SystemLoopbackExternalSecondarySelfTest.java |    34 +
 ...condaryFileSystemInitializationSelfTest.java |   217 +
 ...teHadoopFileSystemShmemAbstractSelfTest.java |    94 +
 ...ileSystemShmemEmbeddedDualAsyncSelfTest.java |    33 +
 ...FileSystemShmemEmbeddedDualSyncSelfTest.java |    33 +
 ...pFileSystemShmemEmbeddedPrimarySelfTest.java |    33 +
 ...ileSystemShmemEmbeddedSecondarySelfTest.java |    33 +
 ...ileSystemShmemExternalDualAsyncSelfTest.java |    33 +
 ...FileSystemShmemExternalDualSyncSelfTest.java |    33 +
 ...pFileSystemShmemExternalPrimarySelfTest.java |    33 +
 ...ileSystemShmemExternalSecondarySelfTest.java |    33 +
 .../collections/HadoopAbstractMapTest.java      |   175 +
 .../HadoopConcurrentHashMultimapSelftest.java   |   280 +
 .../collections/HadoopHashMapSelfTest.java      |   133 +
 .../collections/HadoopSkipListSelfTest.java     |   320 +
 .../streams/HadoopDataStreamSelfTest.java       |   153 +
 .../taskexecutor/HadoopExecutorServiceTest.java |   119 +
 .../HadoopExternalTaskExecutionSelfTest.java    |   232 +
 .../HadoopExternalCommunicationSelfTest.java    |   222 +
 .../impl/util/BasicUserNameMapperSelfTest.java  |   134 +
 .../util/ChainedUserNameMapperSelfTest.java     |   111 +
 .../util/KerberosUserNameMapperSelfTest.java    |   100 +
 .../collections/HadoopAbstractMapTest.java      |   174 -
 .../HadoopConcurrentHashMultimapSelftest.java   |   278 -
 .../collections/HadoopHashMapSelfTest.java      |   131 -
 .../collections/HadoopSkipListSelfTest.java     |   318 -
 .../streams/HadoopDataStreamSelfTest.java       |   150 -
 .../hadoop/state/HadoopGroupingTestState.java   |    40 +
 .../state/HadoopJobTrackerSelfTestState.java    |    45 +
 .../HadoopMapReduceEmbeddedSelfTestState.java   |    32 +
 .../HadoopTaskExecutionSelfTestValues.java      |    51 +
 .../taskexecutor/HadoopExecutorServiceTest.java |   118 -
 .../HadoopExternalTaskExecutionSelfTest.java    |   232 -
 .../HadoopExternalCommunicationSelfTest.java    |   220 -
 .../testsuites/IgniteHadoopTestSuite.java       |   114 +-
 .../IgniteIgfsLinuxAndMacOSTestSuite.java       |    25 +-
 402 files changed, 78397 insertions(+), 78325 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/bin/include/setenv.bat
----------------------------------------------------------------------
diff --git a/bin/include/setenv.bat b/bin/include/setenv.bat
index 9d55521..82b8fce 100644
--- a/bin/include/setenv.bat
+++ b/bin/include/setenv.bat
@@ -43,16 +43,8 @@ set IGNITE_LIBS=%IGNITE_HOME%\libs\*
 
 for /D %%F in (%IGNITE_HOME%\libs\*) do if not "%%F" == "%IGNITE_HOME%\libs\optional" call :concat %%F\*
 
-if exist %IGNITE_HOME%\libs\ignite-hadoop set HADOOP_EDITION=1
-
 if defined USER_LIBS set IGNITE_LIBS=%USER_LIBS%;%IGNITE_LIBS%
 
-if "%HADOOP_EDITION%" == "1" FOR /F "delims=" %%i IN ('%JAVA_HOME%\bin\java.exe -cp %IGNITE_HOME%\libs\ignite-hadoop\* org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ";"' ) DO set IGNITE_HADOOP_CLASSPATH=%%i
-
-if "%IGNITE_HADOOP_CLASSPATH%" == "" goto :eof
-
-set IGNITE_LIBS=%IGNITE_LIBS%;%IGNITE_HADOOP_CLASSPATH%
-
 goto :eof
 
 :concat

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/bin/include/setenv.sh
----------------------------------------------------------------------
diff --git a/bin/include/setenv.sh b/bin/include/setenv.sh
index a85cba3..e088c08 100755
--- a/bin/include/setenv.sh
+++ b/bin/include/setenv.sh
@@ -61,39 +61,8 @@ do
     if [ -d ${file} ] && [ "${file}" != "${IGNITE_HOME}"/libs/optional ]; then
         IGNITE_LIBS=${IGNITE_LIBS}${SEP}${file}/*
     fi
-
-    if [ -d ${file} ] && [ "${file}" == "${IGNITE_HOME}"/libs/ignite-hadoop ]; then
-        HADOOP_EDITION=1
-    fi
 done
 
 if [ "${USER_LIBS}" != "" ]; then
     IGNITE_LIBS=${USER_LIBS}${SEP}${IGNITE_LIBS}
 fi
-
-if [ "${HADOOP_EDITION}" == "1" ]; then
-    # Resolve constants.
-    HADOOP_DEFAULTS="/etc/default/hadoop"
-
-    #
-    # Resolve the rest of Hadoop environment variables.
-    #
-    if [[ -z "${HADOOP_COMMON_HOME}" || -z "${HADOOP_HDFS_HOME}" || -z "${HADOOP_MAPRED_HOME}" ]]; then
-        if [ -f "$HADOOP_DEFAULTS" ]; then
-            source "$HADOOP_DEFAULTS"
-        fi
-    fi
-
-    IGNITE_HADOOP_CLASSPATH=$( "$JAVA" -cp "${IGNITE_HOME}"/libs/ignite-hadoop/'*' \
-        org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ":" )
-
-    statusCode=${?}
-
-    if [ "${statusCode}" -ne 0 ]; then
-       exit ${statusCode}
-    fi
-
-    unset statusCode
-
-    IGNITE_LIBS=${IGNITE_LIBS}${SEP}${IGNITE_HADOOP_CLASSPATH}
-fi

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
index 3eaef1e..b123a4a 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java
@@ -44,6 +44,7 @@ import org.apache.ignite.internal.processors.continuous.GridContinuousProcessor;
 import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor;
 import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor;
 import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter;
+import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
 import org.apache.ignite.internal.processors.igfs.IgfsHelper;
 import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter;
 import org.apache.ignite.internal.processors.job.GridJobProcessor;
@@ -285,6 +286,13 @@ public interface GridKernalContext extends Iterable<GridComponent> {
     public HadoopProcessorAdapter hadoop();
 
     /**
+     * Gets Hadoop helper.
+     *
+     * @return Hadoop helper.
+     */
+    public HadoopHelper hadoopHelper();
+
+    /**
      * Gets utility cache pool.
      *
      * @return Utility cache pool.

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
index 1ff4543..eb214e8 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/GridKernalContextImpl.java
@@ -60,6 +60,7 @@ import org.apache.ignite.internal.processors.continuous.GridContinuousProcessor;
 import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor;
 import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor;
 import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter;
+import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
 import org.apache.ignite.internal.processors.igfs.IgfsHelper;
 import org.apache.ignite.internal.processors.igfs.IgfsProcessorAdapter;
 import org.apache.ignite.internal.processors.job.GridJobProcessor;
@@ -238,6 +239,10 @@ public class GridKernalContextImpl implements GridKernalContext, Externalizable
 
     /** */
     @GridToStringInclude
+    private HadoopHelper hadoopHelper;
+
+    /** */
+    @GridToStringInclude
     private GridSegmentationProcessor segProc;
 
     /** */
@@ -541,6 +546,8 @@ public class GridKernalContextImpl implements GridKernalContext, Externalizable
 
         if (helper instanceof IgfsHelper)
             igfsHelper = (IgfsHelper)helper;
+        else if (helper instanceof HadoopHelper)
+            hadoopHelper = (HadoopHelper)helper;
         else
             assert false : "Unknown helper class: " + helper.getClass();
     }
@@ -733,6 +740,11 @@ public class GridKernalContextImpl implements GridKernalContext, Externalizable
     }
 
     /** {@inheritDoc} */
+    @Override public HadoopHelper hadoopHelper() {
+        return hadoopHelper;
+    }
+
+    /** {@inheritDoc} */
     @Override public GridContinuousProcessor continuous() {
         return contProc;
     }

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java
index 76e495f..0cd2fc1 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteComponentType.java
@@ -41,6 +41,13 @@ public enum IgniteComponentType {
         "ignite-hadoop"
     ),
 
+    /** Hadoop Helper component. */
+    HADOOP_HELPER(
+        "org.apache.ignite.internal.processors.hadoop.HadoopNoopHelper",
+        "org.apache.ignite.internal.processors.hadoop.HadoopHelperImpl",
+        "ignite-hadoop"
+    ),
+
     /** IGFS helper component. */
     IGFS_HELPER(
         "org.apache.ignite.internal.processors.igfs.IgfsNoopHelper",
@@ -160,7 +167,7 @@ public enum IgniteComponentType {
      * @return Created component.
      * @throws IgniteCheckedException If failed.
      */
-    public <T extends GridComponent> T create(GridKernalContext ctx, boolean noOp) throws IgniteCheckedException {
+    public <T> T create(GridKernalContext ctx, boolean noOp) throws IgniteCheckedException {
         return create0(ctx, noOp ? noOpClsName : clsName);
     }
 
@@ -172,7 +179,7 @@ public enum IgniteComponentType {
      * @return Created component.
      * @throws IgniteCheckedException If failed.
      */
-    public <T extends GridComponent> T createIfInClassPath(GridKernalContext ctx, boolean mandatory)
+    public <T> T createIfInClassPath(GridKernalContext ctx, boolean mandatory)
         throws IgniteCheckedException {
         String cls = clsName;
 

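A side note on the generics change above: relaxing the bound from <T extends GridComponent> to <T> is what
allows entries such as HADOOP_HELPER to produce plain helpers (HadoopHelper) that are not grid components.
A minimal sketch of the noop-vs-real selection that create(ctx, noOp) performs; the reflective construction
is an illustrative assumption here, since the actual create0() body is not part of this diff:

    // Hedged sketch, not the actual create0() implementation. Assumes a public
    // no-arg constructor purely for illustration.
    @SuppressWarnings("unchecked")
    static <T> T createSketch(String clsName, String noOpClsName, boolean noOp) throws Exception {
        // Same ternary as create(ctx, noOp) above: pick the no-op stub or the real class.
        String name = noOp ? noOpClsName : clsName;

        return (T)Class.forName(name).getDeclaredConstructor().newInstance();
    }
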
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
index c5d2748..6c5a628 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/IgniteKernal.java
@@ -115,6 +115,7 @@ import org.apache.ignite.internal.processors.continuous.GridContinuousProcessor;
 import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessor;
 import org.apache.ignite.internal.processors.datastructures.DataStructuresProcessor;
 import org.apache.ignite.internal.processors.hadoop.Hadoop;
+import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader;
 import org.apache.ignite.internal.processors.hadoop.HadoopProcessorAdapter;
 import org.apache.ignite.internal.processors.job.GridJobProcessor;
 import org.apache.ignite.internal.processors.jobmetrics.GridJobMetricsProcessor;
@@ -188,6 +189,7 @@ import static org.apache.ignite.internal.GridKernalState.STARTED;
 import static org.apache.ignite.internal.GridKernalState.STARTING;
 import static org.apache.ignite.internal.GridKernalState.STOPPED;
 import static org.apache.ignite.internal.GridKernalState.STOPPING;
+import static org.apache.ignite.internal.IgniteComponentType.HADOOP_HELPER;
 import static org.apache.ignite.internal.IgniteComponentType.IGFS;
 import static org.apache.ignite.internal.IgniteComponentType.IGFS_HELPER;
 import static org.apache.ignite.internal.IgniteComponentType.SCHEDULE;
@@ -821,6 +823,8 @@ public class IgniteKernal implements IgniteEx, IgniteMXBean, Externalizable {
 
             addHelper(IGFS_HELPER.create(F.isEmpty(cfg.getFileSystemConfiguration())));
 
+            addHelper(HADOOP_HELPER.createIfInClassPath(ctx, false));
+
             startProcessor(new IgnitePluginProcessor(ctx, cfg, plugins));
 
             // Off-heap processor has no dependencies.
@@ -881,7 +885,7 @@ public class IgniteKernal implements IgniteEx, IgniteMXBean, Externalizable {
             startProcessor(new DataStreamProcessor(ctx));
             startProcessor((GridProcessor)IGFS.create(ctx, F.isEmpty(cfg.getFileSystemConfiguration())));
             startProcessor(new GridContinuousProcessor(ctx));
-            startProcessor((GridProcessor)createHadoopComponent());
+            startProcessor(createHadoopComponent());
             startProcessor(new DataStructuresProcessor(ctx));
             startProcessor(createComponent(PlatformProcessor.class, ctx));
 
@@ -1148,7 +1152,7 @@ public class IgniteKernal implements IgniteEx, IgniteMXBean, Externalizable {
         else {
             HadoopProcessorAdapter cmp = null;
 
-            if (IgniteComponentType.HADOOP.inClassPath() && cfg.isPeerClassLoadingEnabled()) {
+            if (!ctx.hadoopHelper().isNoOp() && cfg.isPeerClassLoadingEnabled()) {
                 U.warn(log, "Hadoop module is found in classpath, but will not be started because peer class " +
                     "loading is enabled (set IgniteConfiguration.peerClassLoadingEnabled to \"false\" if you want " +
                     "to use Hadoop module).");

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
new file mode 100644
index 0000000..cd94c89
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -0,0 +1,487 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.ClassCache;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.jetbrains.annotations.Nullable;
+import org.jsr166.ConcurrentHashMap8;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Vector;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Class loader that loads certain classes explicitly, without delegation to the parent class loader.
+ * It also supports class parsing to find dependencies that transitively require classes
+ * unavailable to the parent.
+ */
+public class HadoopClassLoader extends URLClassLoader implements ClassCache {
+    /** Hadoop class name: Daemon. */
+    public static final String CLS_DAEMON = "org.apache.hadoop.util.Daemon";
+
+    /** Hadoop class name: ShutdownHookManager. */
+    public static final String CLS_SHUTDOWN_HOOK_MANAGER = "org.apache.hadoop.util.ShutdownHookManager";
+
+    /** Hadoop class name: Daemon replacement. */
+    public static final String CLS_DAEMON_REPLACE = "org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopDaemon";
+
+    /** Hadoop class name: ShutdownHookManager replacement. */
+    public static final String CLS_SHUTDOWN_HOOK_MANAGER_REPLACE =
+        "org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopShutdownHookManager";
+
+    /** */
+    private static final URLClassLoader APP_CLS_LDR = (URLClassLoader)HadoopClassLoader.class.getClassLoader();
+
+    /** */
+    private static final Collection<URL> appJars = F.asList(APP_CLS_LDR.getURLs());
+
+    /** Mutex for native libraries initialization. */
+    private static final Object LIBS_MUX = new Object();
+
+    /** Predefined native libraries to load. */
+    private static final Collection<String> PREDEFINED_NATIVE_LIBS;
+
+    /** Native libraries. */
+    private static Collection<Object> NATIVE_LIBS;
+
+    /** */
+    private static volatile Collection<URL> hadoopJars;
+
+    /** */
+    private static final Map<String, byte[]> bytesCache = new ConcurrentHashMap8<>();
+
+    /** Class cache. */
+    private final ConcurrentMap<String, Class> cacheMap = new ConcurrentHashMap<>();
+
+    /** Diagnostic name of this class loader. */
+    @SuppressWarnings({"FieldCanBeLocal", "UnusedDeclaration"})
+    private final String name;
+
+    /** Hadoop helper. */
+    private final HadoopHelper helper;
+
+    static {
+        // We are very parallel capable.
+        registerAsParallelCapable();
+
+        PREDEFINED_NATIVE_LIBS = new HashSet<>();
+
+        PREDEFINED_NATIVE_LIBS.add("hadoop");
+        PREDEFINED_NATIVE_LIBS.add("MapRClient");
+    }
+
+    /**
+     * Gets the name for a task class loader.
+     *
+     * @param info The task info.
+     * @param prefix Whether to return only the prefix (without task type and number).
+     * @return The class loader name.
+     */
+    public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
+        if (prefix)
+            return "hadoop-task-" + info.jobId() + "-";
+        else
+            return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber();
+    }
+
+    /**
+     * Constructor.
+     *
+     * @param urls URLs.
+     * @param name Class loader name.
+     * @param libNames Optional additional native library names to be linked from the parent class loader.
+     * @param helper Hadoop helper.
+     */
+    public HadoopClassLoader(URL[] urls, String name, @Nullable String[] libNames, HadoopHelper helper) {
+        super(addHadoopUrls(urls), APP_CLS_LDR);
+
+        assert !(getParent() instanceof HadoopClassLoader);
+
+        this.name = name;
+        this.helper = helper;
+
+        initializeNativeLibraries(libNames);
+    }
+
+    /**
+     * Workaround to load native Hadoop libraries. Java doesn't allow the same native library to be loaded by
+     * different class loaders. But we load Hadoop classes many times, and one of these classes - {@code NativeCodeLoader} -
+     * tries to load the same native library over and over again.
+     * <p>
+     * To fix the problem, we force the native library to be loaded by the parent class loader and then "link" the
+     * handle to this library into our class loader. As a result, our class loader will think the library is already
+     * loaded and will be able to link native methods (see the standalone sketch after this file's diff).
+     *
+     * @see <a href="http://docs.oracle.com/javase/1.5.0/docs/guide/jni/spec/invocation.html#library_version">
+     *     JNI specification</a>
+     */
+    @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter")
+    private void initializeNativeLibraries(@Nullable String[] usrLibs) {
+        Collection<Object> res;
+
+        synchronized (LIBS_MUX) {
+            if (NATIVE_LIBS == null) {
+                LinkedList<NativeLibrary> libs = new LinkedList<>();
+
+                for (String lib : PREDEFINED_NATIVE_LIBS)
+                    libs.add(new NativeLibrary(lib, true));
+
+                if (!F.isEmpty(usrLibs)) {
+                    for (String usrLib : usrLibs)
+                        libs.add(new NativeLibrary(usrLib, false));
+                }
+
+                NATIVE_LIBS = initializeNativeLibraries0(libs);
+            }
+
+            res = NATIVE_LIBS;
+        }
+
+        // Link libraries to class loader.
+        Vector<Object> ldrLibs = nativeLibraries(this);
+
+        synchronized (ldrLibs) {
+            ldrLibs.addAll(res);
+        }
+    }
+
+    /**
+     * Initialize native libraries.
+     *
+     * @param libs Libraries to initialize.
+     * @return Initialized libraries.
+     */
+    @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter")
+    private static Collection<Object> initializeNativeLibraries0(Collection<NativeLibrary> libs) {
+        assert Thread.holdsLock(LIBS_MUX);
+
+        Collection<Object> res = new HashSet<>();
+
+        for (NativeLibrary lib : libs) {
+            String libName = lib.name;
+
+            File libFile = new File(libName);
+
+            try {
+                // Load library.
+                if (libFile.isAbsolute())
+                    System.load(libName);
+                else
+                    System.loadLibrary(libName);
+
+                // Find library in class loader internals.
+                Object libObj = null;
+
+                ClassLoader ldr = APP_CLS_LDR;
+
+                while (ldr != null) {
+                    Vector<Object> ldrLibObjs = nativeLibraries(ldr);
+
+                    synchronized (ldrLibObjs) {
+                        for (Object ldrLibObj : ldrLibObjs) {
+                            String name = nativeLibraryName(ldrLibObj);
+
+                            if (libFile.isAbsolute()) {
+                                if (F.eq(name, libFile.getCanonicalPath())) {
+                                    libObj = ldrLibObj;
+
+                                    break;
+                                }
+                            } else {
+                                if (name.contains(libName)) {
+                                    libObj = ldrLibObj;
+
+                                    break;
+                                }
+                            }
+                        }
+                    }
+
+                    if (libObj != null)
+                        break;
+
+                    ldr = ldr.getParent();
+                }
+
+                if (libObj == null)
+                    throw new IgniteException("Failed to find loaded library: " + libName);
+
+                res.add(libObj);
+            }
+            catch (UnsatisfiedLinkError e) {
+                if (!lib.optional)
+                    throw e;
+            }
+            catch (IOException e) {
+                throw new IgniteException("Failed to initialize native libraries due to unexpected exception.", e);
+            }
+        }
+
+        return res;
+    }
+
+    /**
+     * Get native libraries collection for the given class loader.
+     *
+     * @param ldr Class loader.
+     * @return Native libraries.
+     */
+    private static Vector<Object> nativeLibraries(ClassLoader ldr) {
+        assert ldr != null;
+
+        return U.field(ldr, "nativeLibraries");
+    }
+
+    /**
+     * Get native library name.
+     *
+     * @param lib Library.
+     * @return Name.
+     */
+    private static String nativeLibraryName(Object lib) {
+        assert lib != null;
+
+        return U.field(lib, "name");
+    }
+
+    /** {@inheritDoc} */
+    @Override protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
+        try {
+            // Always load Hadoop classes explicitly, since Hadoop can also be present on the application class path.
+            if (name.equals(CLS_SHUTDOWN_HOOK_MANAGER))  // Dirty hack to get rid of Hadoop shutdown hooks.
+                return loadReplace(name, CLS_SHUTDOWN_HOOK_MANAGER_REPLACE);
+            else if (name.equals(CLS_DAEMON))
+                // We replace this in order to be able to forcibly stop some daemon threads
+                // that otherwise never stop (e.g. PeerCache runnables):
+                return loadReplace(name, CLS_DAEMON_REPLACE);
+
+            // For Ignite Hadoop and IGFS classes we have to check if they depend on Hadoop.
+            if (loadByCurrentClassloader(name))
+                return loadClassExplicitly(name, resolve);
+
+            return super.loadClass(name, resolve);
+        }
+        catch (NoClassDefFoundError | ClassNotFoundException e) {
+            throw new ClassNotFoundException("Failed to load class: " + name, e);
+        }
+    }
+
+    /**
+     * Load a class replacing it with our own implementation.
+     *
+     * @param originalName Original class name.
+     * @param replaceName Replacement class name.
+     * @return Class.
+     */
+    private Class<?> loadReplace(final String originalName, final String replaceName) {
+        synchronized (getClassLoadingLock(originalName)) {
+            // First, check if the class has already been loaded
+            Class c = findLoadedClass(originalName);
+
+            if (c != null)
+                return c;
+
+            byte[] bytes = bytesCache.get(originalName);
+
+            if (bytes == null) {
+                InputStream in = helper.loadClassBytes(this, replaceName);
+
+                if (in == null)
+                    throw new IgniteException("Failed to replace class [originalName=" + originalName +
+                        ", replaceName=" +  replaceName + ']');
+
+                bytes = helper.loadReplace(in, originalName, replaceName);
+
+                bytesCache.put(originalName, bytes);
+            }
+
+            return defineClass(originalName, bytes, 0, bytes.length);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public Class<?> getFromCache(String clsName) throws ClassNotFoundException {
+        Class<?> cls = cacheMap.get(clsName);
+
+        if (cls == null) {
+            Class old = cacheMap.putIfAbsent(clsName, cls = Class.forName(clsName, true, this));
+
+            if (old != null)
+                cls = old;
+        }
+
+        return cls;
+    }
+
+    /**
+     * Check whether a class must be loaded with the current class loader, or whether the normal delegation model
+     * should be used.
+     * <p>
+     * The override is only necessary for Ignite classes that depend directly or transitively on Hadoop: all classes
+     * from the "org.apache.ignite.internal.processors.hadoop.impl" package, plus several well-known classes from
+     * the "org.apache.ignite.hadoop" package.
+     *
+     * @param clsName Class name.
+     * @return Whether class must be loaded by current classloader without delegation.
+     */
+    @SuppressWarnings("RedundantIfStatement")
+    public static boolean loadByCurrentClassloader(String clsName) {
+        // All impl classes.
+        if (clsName.startsWith("org.apache.ignite.internal.processors.hadoop.impl"))
+            return true;
+
+        // Several classes from public API.
+        if (clsName.startsWith("org.apache.ignite.hadoop")) {
+            // We use "contains" instead of "equals" to handle subclasses properly.
+            if (clsName.contains("org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem") ||
+                clsName.contains("org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem") ||
+                clsName.contains("org.apache.ignite.hadoop.mapreduce.IgniteHadoopClientProtocolProvider"))
+                return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * @param name Class name.
+     * @param resolve Resolve class.
+     * @return Class.
+     * @throws ClassNotFoundException If failed.
+     */
+    private Class<?> loadClassExplicitly(String name, boolean resolve) throws ClassNotFoundException {
+        synchronized (getClassLoadingLock(name)) {
+            // First, check if the class has already been loaded
+            Class c = findLoadedClass(name);
+
+            if (c == null) {
+                long t1 = System.nanoTime();
+
+                c = findClass(name);
+
+                // this is the defining class loader; record the stats
+                sun.misc.PerfCounter.getFindClassTime().addElapsedTimeFrom(t1);
+                sun.misc.PerfCounter.getFindClasses().increment();
+            }
+
+            if (resolve)
+                resolveClass(c);
+
+            return c;
+        }
+    }
+
+    /**
+     * @param urls Base URLs.
+     * @return URLs augmented with the application class path and Hadoop JAR locations.
+     */
+    private static URL[] addHadoopUrls(URL[] urls) {
+        Collection<URL> hadoopJars;
+
+        try {
+            hadoopJars = hadoopUrls();
+        }
+        catch (IgniteCheckedException e) {
+            throw new RuntimeException(e);
+        }
+
+        ArrayList<URL> list = new ArrayList<>(hadoopJars.size() + appJars.size() + (urls == null ? 0 : urls.length));
+
+        list.addAll(appJars);
+        list.addAll(hadoopJars);
+
+        if (!F.isEmpty(urls))
+            list.addAll(F.asList(urls));
+
+        return list.toArray(new URL[list.size()]);
+    }
+
+    /**
+     * @return Collection of jar URLs.
+     * @throws IgniteCheckedException If failed.
+     */
+    public static Collection<URL> hadoopUrls() throws IgniteCheckedException {
+        Collection<URL> hadoopUrls = hadoopJars;
+
+        if (hadoopUrls != null)
+            return hadoopUrls;
+
+        synchronized (HadoopClassLoader.class) {
+            hadoopUrls = hadoopJars;
+
+            if (hadoopUrls != null)
+                return hadoopUrls;
+
+            try {
+                hadoopUrls = HadoopClasspathUtils.classpathForClassLoader();
+            }
+            catch (IOException e) {
+                throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e);
+            }
+
+            hadoopJars = hadoopUrls;
+
+            return hadoopUrls;
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(HadoopClassLoader.class, this);
+    }
+
+    /**
+     * Gets the diagnostic name of this class loader.
+     */
+    public String name() {
+        return name;
+    }
+
+    /**
+     * Native library abstraction.
+     */
+    private static class NativeLibrary {
+        /** Library name. */
+        private final String name;
+
+        /** Whether library is optional. */
+        private final boolean optional;
+
+        /**
+         * Constructor.
+         *
+         * @param name Library name.
+         * @param optional Optional flag.
+         */
+        public NativeLibrary(String name, boolean optional) {
+            this.name = name;
+            this.optional = optional;
+        }
+    }
+}
\ No newline at end of file
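
The initializeNativeLibraries() javadoc above describes the native-library workaround in prose. Below is a
minimal standalone sketch of the "linking" step, reaching into the same JDK-internal ClassLoader.nativeLibraries
field that the commit accesses via U.field() (JDK 7/8 internals; illustration only, not part of the commit):

    import java.lang.reflect.Field;
    import java.util.Vector;

    final class NativeLibLinker {
        /** Copies loaded native-library handles from one class loader to another. */
        @SuppressWarnings("unchecked")
        static void linkLoadedLibraries(ClassLoader from, ClassLoader to) throws ReflectiveOperationException {
            // Private field holding handles of natives loaded by a class loader (JDK 7/8).
            Field f = ClassLoader.class.getDeclaredField("nativeLibraries");

            f.setAccessible(true);

            Vector<Object> src = (Vector<Object>)f.get(from);
            Vector<Object> dst = (Vector<Object>)f.get(to);

            // After copying, 'to' believes the libraries are already loaded and can
            // link native methods without a second System.loadLibrary() call.
            synchronized (src) {
                dst.addAll(src);
            }
        }
    }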
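
loadReplace() above performs class substitution: bytes prepared by HadoopHelper.loadReplace() are defined under
the original Hadoop class name, so ShutdownHookManager and Daemon resolve to Ignite-controlled replacements.
A hedged sketch of just the defining step; patchBytes() is a hypothetical stand-in for the bytecode renaming
that the helper performs:

    /** Sketch only: defines replacement bytecode under the original class name. */
    class SubstitutingClassLoader extends ClassLoader {
        Class<?> defineReplacement(String originalName, byte[] replacementBytes) {
            // Hypothetical helper: must rewrite the class file so its constant pool
            // declares originalName (e.g. with a bytecode remapper).
            byte[] patched = patchBytes(replacementBytes, originalName.replace('.', '/'));

            // Subsequent loads of originalName through this loader resolve to the replacement.
            return defineClass(originalName, patched, 0, patched.length);
        }

        private byte[] patchBytes(byte[] bytes, String internalName) {
            throw new UnsupportedOperationException("Bytecode renaming not shown in this sketch.");
        }
    }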

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
new file mode 100644
index 0000000..7579ddb
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -0,0 +1,424 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Hadoop classpath utilities.
+ */
+public class HadoopClasspathUtils {
+    /** Prefix directory. */
+    public static final String PREFIX = "HADOOP_PREFIX";
+
+    /** Hadoop home directory. */
+    public static final String HOME = "HADOOP_HOME";
+
+    /** Hadoop common directory. */
+    public static final String COMMON_HOME = "HADOOP_COMMON_HOME";
+
+    /** Hadoop HDFS directory. */
+    public static final String HDFS_HOME = "HADOOP_HDFS_HOME";
+
+    /** Hadoop mapred directory. */
+    public static final String MAPRED_HOME = "HADOOP_MAPRED_HOME";
+
+    /** Arbitrary additional dependencies. Compliant with standard Java classpath resolution. */
+    public static final String HADOOP_USER_LIBS = "HADOOP_USER_LIBS";
+
+    /** Empty string. */
+    private static final String EMPTY_STR = "";
+
+    /**
+     * Gets Hadoop class path as a list of URLs (for in-process class loader usage).
+     *
+     * @return List of class path URLs.
+     * @throws IOException If failed.
+     */
+    public static List<URL> classpathForClassLoader() throws IOException {
+        List<URL> res = new ArrayList<>();
+
+        for (SearchDirectory dir : classpathDirectories()) {
+            for (File file : dir.files()) {
+                try {
+                    res.add(file.toURI().toURL());
+                }
+                catch (MalformedURLException e) {
+                    throw new IOException("Failed to convert file path to URL: " + file.getPath(), e);
+                }
+            }
+        }
+
+        return res;
+    }
+
+    /**
+     * Gets Hadoop locations.
+     *
+     * @return The locations as determined from the environment.
+     * @throws IOException If the Hadoop home cannot be resolved from the environment.
+     */
+    public static HadoopLocations locations() throws IOException {
+        // Query environment.
+        String hadoopHome = systemOrEnv(PREFIX, systemOrEnv(HOME, EMPTY_STR));
+
+        String commonHome = systemOrEnv(COMMON_HOME, EMPTY_STR);
+        String hdfsHome = systemOrEnv(HDFS_HOME, EMPTY_STR);
+        String mapredHome = systemOrEnv(MAPRED_HOME, EMPTY_STR);
+
+        // If any composite location is defined, use only the composite ones.
+        if (!isEmpty(commonHome) || !isEmpty(hdfsHome) || !isEmpty(mapredHome)) {
+            HadoopLocations res = new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome);
+
+            if (res.valid())
+                return res;
+            else
+                throw new IOException("Failed to resolve Hadoop classpath because some environment variables are " +
+                    "either undefined or point to nonexistent directories [" +
+                    "[env=" + COMMON_HOME + ", value=" + commonHome + ", exists=" + res.commonExists() + "], " +
+                    "[env=" + HDFS_HOME + ", value=" + hdfsHome + ", exists=" + res.hdfsExists() + "], " +
+                    "[env=" + MAPRED_HOME + ", value=" + mapredHome + ", exists=" + res.mapredExists() + "]]");
+        }
+        else if (!isEmpty(hadoopHome)) {
+            // All further checks will be based on HADOOP_HOME, so check for its existence.
+            if (!exists(hadoopHome))
+                throw new IOException("Failed to resolve Hadoop classpath because " + HOME + " environment " +
+                    "variable points to nonexistent directory: " + hadoopHome);
+
+            // Probe Apache Hadoop.
+            HadoopLocations res = new HadoopLocations(
+                hadoopHome,
+                hadoopHome + "/share/hadoop/common",
+                hadoopHome + "/share/hadoop/hdfs",
+                hadoopHome + "/share/hadoop/mapreduce"
+            );
+
+            if (res.valid())
+                return res;
+
+            // Probe CDH.
+            res = new HadoopLocations(
+                hadoopHome,
+                hadoopHome,
+                hadoopHome + "/../hadoop-hdfs",
+                hadoopHome + "/../hadoop-mapreduce"
+            );
+
+            if (res.valid())
+                return res;
+
+            // Probe HDP.
+            res = new HadoopLocations(
+                hadoopHome,
+                hadoopHome,
+                hadoopHome + "/../hadoop-hdfs-client",
+                hadoopHome + "/../hadoop-mapreduce-client"
+            );
+
+            if (res.valid())
+                return res;
+
+            // Failed.
+            throw new IOException("Failed to resolve Hadoop classpath because " + HOME + " environment variable " +
+                "is either invalid or points to non-standard Hadoop distribution: " + hadoopHome);
+        }
+        else {
+            // Advise to set HADOOP_HOME only, as this is the preferred way to configure the classpath.
+            throw new IOException("Failed to resolve Hadoop classpath (please define " + HOME + " environment " +
+                "variable and point it to your Hadoop distribution).");
+        }
+    }
+
+    /**
+     * Gets base directories to discover classpath elements in.
+     *
+     * @return Collection of directory and mask pairs.
+     * @throws IOException If a mandatory classpath location is not found.
+     */
+    private static Collection<SearchDirectory> classpathDirectories() throws IOException {
+        HadoopLocations loc = locations();
+
+        Collection<SearchDirectory> res = new ArrayList<>();
+
+        // Add libraries from Hadoop distribution:
+        res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-common-")));
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-auth-")));
+
+        res.add(new SearchDirectory(new File(loc.hdfs()), new PrefixDirectoryFilter("hadoop-hdfs-")));
+
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-common")));
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-core")));
+
+        // Add user provided libs:
+        res.addAll(parseUserLibs());
+
+        return res;
+    }
+
+    /**
+     * Parse user libs.
+     *
+     * @return Parsed libs search patterns.
+     * @throws IOException If failed.
+     */
+    public static Collection<SearchDirectory> parseUserLibs() throws IOException {
+        return parseUserLibs(systemOrEnv(HADOOP_USER_LIBS, null));
+    }
+
+    /**
+     * Parse user libs.
+     *
+     * @param str String.
+     * @return Result.
+     * @throws IOException If failed.
+     */
+    public static Collection<SearchDirectory> parseUserLibs(String str) throws IOException {
+        Collection<SearchDirectory> res = new LinkedList<>();
+
+        if (!isEmpty(str)) {
+            String[] tokens = normalize(str).split(File.pathSeparator);
+
+            for (String token : tokens) {
+                // Skip empty tokens.
+                if (isEmpty(token))
+                    continue;
+
+                File file = new File(token);
+                File dir = file.getParentFile();
+
+                if (token.endsWith("*")) {
+                    assert dir != null;
+
+                    res.add(new SearchDirectory(dir, AcceptAllDirectoryFilter.INSTANCE, false));
+                }
+                else {
+                    // Token is a root path such as "/" or "C:\", so there is nothing to resolve against.
+                    if (dir == null)
+                        continue;
+
+                    res.add(new SearchDirectory(dir, new ExactDirectoryFilter(file.getName()), false));
+                }
+            }
+        }
+
+        return res;
+    }
+
+    /**
+     * Get system property or environment variable with the given name.
+     *
+     * @param name Variable name.
+     * @param dflt Default value.
+     * @return Value.
+     */
+    private static String systemOrEnv(String name, String dflt) {
+        String res = System.getProperty(name);
+
+        if (res == null)
+            res = System.getenv(name);
+
+        return res != null ? res : dflt;
+    }
+
+    /**
+     * Answers if the given path denotes an existing readable directory.
+     *
+     * @param path The directory path.
+     * @return {@code True} if the given path denotes an existing readable directory.
+     */
+    public static boolean exists(String path) {
+        if (path == null)
+            return false;
+
+        Path p = Paths.get(path);
+
+        return Files.exists(p) && Files.isDirectory(p) && Files.isReadable(p);
+    }
+
+    /**
+     * Check if string is empty.
+     *
+     * @param val Value.
+     * @return {@code True} if empty.
+     */
+    private static boolean isEmpty(String val) {
+        return val == null || val.isEmpty();
+    }
+
+    /**
+     * Normalize the string.
+     *
+     * @param str String.
+     * @return Normalized string.
+     */
+    private static String normalize(String str) {
+        assert str != null;
+
+        return str.trim().toLowerCase();
+    }
+
+    /**
+     * Simple structure to hold a directory and the file name filter assigned to it.
+     */
+    public static class SearchDirectory {
+        /** File. */
+        private final File dir;
+
+        /** Filter. */
+        private final DirectoryFilter filter;
+
+        /** Whether directory must exist. */
+        private final boolean strict;
+
+        /**
+         * Constructor for strict directory search.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         * @throws IOException If failed.
+         */
+        private SearchDirectory(File dir, DirectoryFilter filter) throws IOException {
+            this(dir, filter, true);
+        }
+
+        /**
+         * Constructor.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         * @param strict Whether directory must exist.
+         * @throws IOException If failed.
+         */
+        private SearchDirectory(File dir, DirectoryFilter filter, boolean strict) throws IOException {
+            this.dir = dir;
+            this.filter = filter;
+            this.strict = strict;
+
+            if (strict && !exists(dir.getAbsolutePath()))
+                throw new IOException("Directory cannot be read: " + dir.getAbsolutePath());
+        }
+
+        /**
+         * @return Child files matching the filter.
+         * @throws IOException If the directory cannot be listed and the search is strict.
+         */
+        public File[] files() throws IOException {
+            File[] files = dir.listFiles(new FilenameFilter() {
+                @Override public boolean accept(File dir, String name) {
+                    return filter.test(name);
+                }
+            });
+
+            if (files == null) {
+                if (strict)
+                    throw new IOException("Failed to get directory files [dir=" + dir + ']');
+                else
+                    return new File[0];
+            }
+            else
+                return files;
+        }
+    }
+
+    /**
+     * Directory filter interface.
+     */
+    public interface DirectoryFilter {
+        /**
+         * Test if file with this name should be included.
+         *
+         * @param name File name.
+         * @return {@code True} if passed.
+         */
+        public boolean test(String name);
+    }
+
+    /**
+     * Filter to accept all files.
+     */
+    public static class AcceptAllDirectoryFilter implements DirectoryFilter {
+        /** Singleton instance. */
+        public static final AcceptAllDirectoryFilter INSTANCE = new AcceptAllDirectoryFilter();
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return true;
+        }
+    }
+
+    /**
+     * Filter which uses prefix to filter files.
+     */
+    public static class PrefixDirectoryFilter implements DirectoryFilter {
+        /** Prefix. */
+        private final String prefix;
+
+        /**
+         * Constructor.
+         *
+         * @param prefix Prefix.
+         */
+        public PrefixDirectoryFilter(String prefix) {
+            assert prefix != null;
+
+            this.prefix = normalize(prefix);
+        }
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).startsWith(prefix);
+        }
+    }
+
+    /**
+     * Filter which uses exact comparison.
+     */
+    public static class ExactDirectoryFilter implements DirectoryFilter {
+        /** Name. */
+        private final String name;
+
+        /**
+         * Constructor.
+         *
+         * @param name Name.
+         */
+        public ExactDirectoryFilter(String name) {
+            this.name = normalize(name);
+        }
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).equals(this.name);
+        }
+    }
+}
\ No newline at end of file

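A minimal usage sketch for the new classpath resolution (all paths below are hypothetical, and HADOOP_HOME or the composite *_HOME variables are assumed to point at a standard Apache Hadoop, CDH or HDP layout). Note that systemOrEnv() consults the system property before the environment variable, which makes HADOOP_USER_LIBS easy to set programmatically:

    // Register extra user libraries; wildcards follow the usual classpath rules.
    System.setProperty(HadoopClasspathUtils.HADOOP_USER_LIBS,
        "/opt/hadoop-libs/*" + File.pathSeparator + "/opt/extra/my-lib.jar");

    // Resolve every classpath element as a URL for an in-process class loader.
    List<URL> cp = HadoopClasspathUtils.classpathForClassLoader();

    for (URL url : cp)
        System.out.println(url);
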
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
new file mode 100644
index 0000000..ae17ac8
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.lang.reflect.Constructor;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Hadoop job info based on default Hadoop configuration.
+ */
+public class HadoopDefaultJobInfo implements HadoopJobInfo, Externalizable {
+    /** */
+    private static final long serialVersionUID = 5489900236464999951L;
+
+    /** {@code true} if job has combiner. */
+    private boolean hasCombiner;
+
+    /** Number of reducers configured for job. */
+    private int numReduces;
+
+    /** Configuration. */
+    private Map<String,String> props = new HashMap<>();
+
+    /** Job name. */
+    private String jobName;
+
+    /** User name. */
+    private String user;
+
+    /**
+     * Default constructor required by {@link Externalizable}.
+     */
+    public HadoopDefaultJobInfo() {
+        // No-op.
+    }
+
+    /**
+     * Constructor.
+     *
+     * @param jobName Job name.
+     * @param user User name.
+     * @param hasCombiner {@code true} if job has combiner.
+     * @param numReduces Number of reducers configured for job.
+     * @param props All other properties of the job.
+     */
+    public HadoopDefaultJobInfo(String jobName, String user, boolean hasCombiner, int numReduces,
+        Map<String, String> props) {
+        this.jobName = jobName;
+        this.user = user;
+        this.hasCombiner = hasCombiner;
+        this.numReduces = numReduces;
+        this.props = props;
+    }
+
+    /** {@inheritDoc} */
+    @Nullable @Override public String property(String name) {
+        return props.get(name);
+    }
+
+    /** {@inheritDoc} */
+    @Override public HadoopJob createJob(Class<? extends HadoopJob> jobCls, HadoopJobId jobId, IgniteLogger log,
+        @Nullable String[] libNames, HadoopHelper helper) throws IgniteCheckedException {
+        assert jobCls != null;
+
+        try {
+            Constructor<? extends HadoopJob> constructor = jobCls.getConstructor(HadoopJobId.class,
+                HadoopDefaultJobInfo.class, IgniteLogger.class, String[].class, HadoopHelper.class);
+
+            return constructor.newInstance(jobId, this, log, libNames, helper);
+        }
+        catch (Throwable t) {
+            if (t instanceof Error)
+                throw (Error)t;
+
+            throw new IgniteCheckedException(t);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean hasCombiner() {
+        return hasCombiner;
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean hasReducer() {
+        return reducers() > 0;
+    }
+
+    /** {@inheritDoc} */
+    @Override public int reducers() {
+        return numReduces;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String jobName() {
+        return jobName;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String user() {
+        return user;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void writeExternal(ObjectOutput out) throws IOException {
+        U.writeString(out, jobName);
+        U.writeString(out, user);
+
+        out.writeBoolean(hasCombiner);
+        out.writeInt(numReduces);
+
+        U.writeStringMap(out, props);
+    }
+
+    /** {@inheritDoc} */
+    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
+        jobName = U.readString(in);
+        user = U.readString(in);
+
+        hasCombiner = in.readBoolean();
+        numReduces = in.readInt();
+
+        props = U.readStringMap(in);
+    }
+
+    /**
+     * @return Properties of the job.
+     */
+    public Map<String, String> properties() {
+        return props;
+    }
+}
\ No newline at end of file

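Since HadoopDefaultJobInfo travels between nodes via Externalizable, a round-trip through plain Java serialization should preserve all fields. A quick sketch (job name, user and property values are made up; java.io and java.util imports omitted):

    Map<String, String> props = new HashMap<>();
    props.put("mapreduce.job.reduces", "4");

    HadoopDefaultJobInfo info = new HadoopDefaultJobInfo("word-count", "alice", true, 4, props);

    ByteArrayOutputStream bos = new ByteArrayOutputStream();

    try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
        out.writeObject(info);
    }

    try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
        HadoopDefaultJobInfo copy = (HadoopDefaultJobInfo)in.readObject();

        assert copy.reducers() == 4 && copy.hasCombiner() && "alice".equals(copy.user());
    }
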
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java
new file mode 100644
index 0000000..a8fee79
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelper.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import java.io.InputStream;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Hadoop helper providing access to the common Hadoop class loader and class bytes loading utilities.
+ */
+public interface HadoopHelper {
+    /**
+     * @return Whether this is no-op implementation.
+     */
+    public boolean isNoOp();
+
+    /**
+     * Get common Hadoop class loader.
+     *
+     * @return Common Hadoop class loader.
+     */
+    public HadoopClassLoader commonClassLoader();
+
+    /**
+     * Load class bytes for a special replacement class, substituting the original class name with the replacer name.
+     *
+     * @param in Input stream with the original class bytes.
+     * @param originalName Original class name.
+     * @param replaceName Replacer class name.
+     * @return Replaced class bytes.
+     */
+    public byte[] loadReplace(InputStream in, String originalName, String replaceName);
+
+    /**
+     * @param ldr Loader.
+     * @param clsName Class.
+     * @return Input stream.
+     */
+    @Nullable public InputStream loadClassBytes(ClassLoader ldr, String clsName);
+}
\ No newline at end of file

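Callers are expected to check isNoOp() before touching the class loading machinery, since the no-op implementation below throws from every other method. A hedged sketch, assuming 'helper' is obtained from the kernal context on a node where the ignite-hadoop module is installed:

    if (!helper.isNoOp()) {
        HadoopClassLoader ldr = helper.commonClassLoader();

        // Load Hadoop classes through the shared loader instead of the
        // application class loader.
        Class<?> confCls = Class.forName("org.apache.hadoop.conf.Configuration", true, ldr);
    }
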
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
index a3b1bb6..853c63d 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
@@ -59,11 +59,13 @@ public interface HadoopJobInfo extends Serializable {
      * @param jobId Job ID.
      * @param log Logger.
      * @param libNames Optional additional native library names.
+     * @param helper Hadoop helper.
      * @return Job.
      * @throws IgniteCheckedException If failed.
      */
     public HadoopJob createJob(Class<? extends HadoopJob> jobCls,
-        HadoopJobId jobId, IgniteLogger log, @Nullable String[] libNames) throws IgniteCheckedException;
+        HadoopJobId jobId, IgniteLogger log, @Nullable String[] libNames, HadoopHelper helper)
+            throws IgniteCheckedException;
 
     /**
      * @return Number of reducers configured for job.

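The only effect of this change on call sites is that the helper must now be threaded through; the updated call shape is roughly as follows (variable names are illustrative):

    HadoopJob job = jobInfo.createJob(jobCls, jobId, log, libNames, helper);
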
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
new file mode 100644
index 0000000..a90007f
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+/**
+ * Simple structure to hold Hadoop directory locations.
+ */
+public class HadoopLocations {
+    /** Hadoop home. */
+    private final String home;
+
+    /** Common home. */
+    private final String common;
+
+    /** HDFS home. */
+    private final String hdfs;
+
+    /** Mapred home. */
+    private final String mapred;
+
+    /** Whether common home exists. */
+    private final boolean commonExists;
+
+    /** Whether HDFS home exists. */
+    private final boolean hdfsExists;
+
+    /** Whether mapred home exists. */
+    private final boolean mapredExists;
+
+    /**
+     * Constructor.
+     *
+     * @param home Hadoop home.
+     * @param common Common home.
+     * @param hdfs HDFS home.
+     * @param mapred Mapred home.
+     */
+    public HadoopLocations(String home, String common, String hdfs, String mapred) {
+        assert common != null && hdfs != null && mapred != null;
+
+        this.home = home;
+        this.common = common;
+        this.hdfs = hdfs;
+        this.mapred = mapred;
+
+        commonExists = HadoopClasspathUtils.exists(common);
+        hdfsExists = HadoopClasspathUtils.exists(hdfs);
+        mapredExists = HadoopClasspathUtils.exists(mapred);
+    }
+
+    /**
+     * @return Hadoop home.
+     */
+    public String home() {
+        return home;
+    }
+
+    /**
+     * @return Common home.
+     */
+    public String common() {
+        return common;
+    }
+
+    /**
+     * @return HDFS home.
+     */
+    public String hdfs() {
+        return hdfs;
+    }
+
+    /**
+     * @return Mapred home.
+     */
+    public String mapred() {
+        return mapred;
+    }
+
+    /**
+     * @return Whether common home exists.
+     */
+    public boolean commonExists() {
+        return commonExists;
+    }
+
+    /**
+     * @return Whether HDFS home exists.
+     */
+    public boolean hdfsExists() {
+        return hdfsExists;
+    }
+
+    /**
+     * @return Whether mapred home exists.
+     */
+    public boolean mapredExists() {
+        return mapredExists;
+    }
+
+    /**
+     * Whether all required directories exist.
+     *
+     * @return {@code True} if exists.
+     */
+    public boolean valid() {
+        return commonExists && hdfsExists && mapredExists;
+    }
+}
\ No newline at end of file

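HadoopLocations is a plain value object: existence of each home directory is probed once in the constructor, and valid() simply ANDs the three flags, so failed probes can be reported precisely. A sketch with a hypothetical Apache-style layout:

    HadoopLocations loc = new HadoopLocations(
        "/usr/lib/hadoop",
        "/usr/lib/hadoop/share/hadoop/common",
        "/usr/lib/hadoop/share/hadoop/hdfs",
        "/usr/lib/hadoop/share/hadoop/mapreduce"
    );

    if (!loc.valid()) {
        // Pinpoint the missing piece instead of failing blindly.
        System.err.println("common=" + loc.commonExists() + ", hdfs=" + loc.hdfsExists() +
            ", mapred=" + loc.mapredExists());
    }
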
http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java
new file mode 100644
index 0000000..d3348ca
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopHelper.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import org.apache.ignite.internal.GridKernalContext;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.InputStream;
+
+/**
+ * Noop Hadoop Helper implementation.
+ */
+@SuppressWarnings("unused")
+public class HadoopNoopHelper implements HadoopHelper {
+    /**
+     * Constructor.
+     *
+     * @param ctx Kernal context.
+     */
+    @SuppressWarnings("UnusedParameters")
+    public HadoopNoopHelper(GridKernalContext ctx) {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean isNoOp() {
+        return true;
+    }
+
+    /** {@inheritDoc} */
+    @Override public HadoopClassLoader commonClassLoader() {
+        throw unsupported();
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] loadReplace(InputStream in, String originalName, String replaceName) {
+        throw unsupported();
+    }
+
+    /** {@inheritDoc} */
+    @Nullable @Override public InputStream loadClassBytes(ClassLoader ldr, String clsName) {
+        throw unsupported();
+    }
+
+    /**
+     * @return Exception to be thrown by the caller.
+     */
+    private static UnsupportedOperationException unsupported() {
+        return new UnsupportedOperationException("Operation is unsupported (Hadoop module is not in the classpath).");
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java
index 501870a..fa4ab47 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopNoopProcessor.java
@@ -79,8 +79,8 @@ public class HadoopNoopProcessor extends HadoopProcessorAdapter {
      * Creates an exception to be uniformly thrown from all the methods.
      */
     private IllegalStateException createException() {
-        return new IllegalStateException("Hadoop module is not loaded (please ensure that ignite-hadoop.jar is in " +
-            "classpath and IgniteConfiguration.peerClassLoadingEnabled is set to false).");
+        return new IllegalStateException("Hadoop module is not loaded (please ensure that ignite-hadoop.jar is " +
+            "in libs and IgniteConfiguration.peerClassLoadingEnabled is set to false).");
     }
 
     /** {@inheritDoc} */

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
index 6ff1f8f..1dd12d9 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsImpl.java
@@ -183,8 +183,11 @@ public final class IgfsImpl implements IgfsEx {
         data = igfsCtx.data();
         secondaryFs = cfg.getSecondaryFileSystem();
 
+        if (secondaryFs instanceof IgfsKernalContextAware)
+            ((IgfsKernalContextAware)secondaryFs).setKernalContext(igfsCtx.kernalContext());
+
         if (secondaryFs instanceof LifecycleAware)
-            ((LifecycleAware) secondaryFs).start();
+            ((LifecycleAware)secondaryFs).start();
 
         /* Default IGFS mode. */
         IgfsMode dfltMode;

http://git-wip-us.apache.org/repos/asf/ignite/blob/8032fc2c/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java
new file mode 100644
index 0000000..7f59db4
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/igfs/IgfsKernalContextAware.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.igfs;
+
+import org.apache.ignite.internal.GridKernalContext;
+
+/**
+ * Interface for file systems that accept kernal context injection.
+ */
+public interface IgfsKernalContextAware {
+    /**
+     * Set kernal context.
+     *
+     * @param ctx Kernal context.
+     */
+    public void setKernalContext(GridKernalContext ctx);
+}
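
A sketch of a component opting into the new injection point (class name is hypothetical, and a real secondary file system would also implement IgfsSecondaryFileSystem). The IgfsImpl change above guarantees that setKernalContext() runs before LifecycleAware.start():

    public class ContextAwareSecondaryFs implements IgfsKernalContextAware, LifecycleAware {
        /** Kernal context injected by IgfsImpl. */
        private volatile GridKernalContext ctx;

        /** {@inheritDoc} */
        @Override public void setKernalContext(GridKernalContext ctx) {
            this.ctx = ctx;
        }

        /** {@inheritDoc} */
        @Override public void start() {
            assert ctx != null : "Kernal context is injected before start().";
        }

        /** {@inheritDoc} */
        @Override public void stop() {
            // No-op.
        }
    }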