Posted to commits@ignite.apache.org by sb...@apache.org on 2015/06/18 21:49:00 UTC

[50/50] incubator-ignite git commit: #IG-980: workable version (needs some cleanup).

#IG-980: workable version (needs some cleanup).


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/3df0112b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/3df0112b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/3df0112b

Branch: refs/heads/ignite-980
Commit: 3df0112b0388ecab854db6ef85095f00f1a56246
Parents: 16997e1
Author: iveselovskiy <iv...@gridgain.com>
Authored: Thu Jun 18 22:47:22 2015 +0300
Committer: iveselovskiy <iv...@gridgain.com>
Committed: Thu Jun 18 22:47:22 2015 +0300

----------------------------------------------------------------------
 modules/hadoop/pom.xml                          |  78 ---------
 .../fs/IgniteHadoopFileSystemCounterWriter.java |   4 +-
 .../fs/IgniteHadoopIgfsSecondaryFileSystem.java |   2 +-
 .../internal/processors/hadoop/HadoopUtils.java | 159 +++++++++++--------
 .../hadoop/fs/HadoopLazyConcurrentMap.java      |  49 ++++--
 .../hadoop/jobtracker/HadoopJobTracker.java     |   2 -
 .../processors/hadoop/v2/HadoopV2Job.java       |  45 +++++-
 .../hadoop/v2/HadoopV2JobResourceManager.java   |  11 +-
 .../hadoop/v2/HadoopV2TaskContext.java          |   6 +-
 .../hadoop/HadoopClientProtocolSelfTest.java    |   9 --
 .../apache/ignite/igfs/IgfsEventsTestSuite.java |   5 +-
 .../hadoop/HadoopAbstractSelfTest.java          |   9 ++
 .../hadoop/HadoopFileSystemsTest.java           |  36 ++++-
 .../processors/hadoop/HadoopMapReduceTest.java  |  15 --
 .../testsuites/IgniteHadoopTestSuite.java       |  52 +++---
 .../IgniteIgfsLinuxAndMacOSTestSuite.java       |   2 +-
 16 files changed, 238 insertions(+), 246 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/modules/hadoop/pom.xml b/modules/hadoop/pom.xml
index 1884271..dcd788e 100644
--- a/modules/hadoop/pom.xml
+++ b/modules/hadoop/pom.xml
@@ -143,84 +143,6 @@
                 <artifactId>maven-surefire-plugin</artifactId>
                 <version>2.17</version>
                 <configuration>
-                    <classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-annotations</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-auth</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-codec:commons-codec</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.httpcomponents:httpclient</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.httpcomponents:httpcore</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-common</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.guava:guava</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-cli:commons-cli</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.commons:commons-math3</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>xmlenc:xmlenc</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-httpclient:commons-httpclient</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-net:commons-net</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.servlet:servlet-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.mortbay.jetty:jetty</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.mortbay.jetty:jetty-util</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-core</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-json</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jettison:jettison</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.xml.bind:jaxb-impl</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-jaxrs</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-xc</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-server</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>asm:asm</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>tomcat:jasper-compiler</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>tomcat:jasper-runtime</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.servlet.jsp:jsp-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-el:commons-el</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-logging:commons-logging</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>net.java.dev.jets3t:jets3t</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.jamesmurty.utils:java-xmlbuilder</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-core-asl</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-mapper-asl
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.avro:avro</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.thoughtworks.paranamer:paranamer</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.xerial.snappy:snappy-java</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.protobuf:protobuf-java</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.jcraft:jsch</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.code.findbugs:jsr305</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.zookeeper:zookeeper</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>org.apache.commons:commons-compress</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.tukaani:xz</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-hdfs</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-daemon:commons-daemon</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-mapreduce-client-common
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-common</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.xml.bind:jaxb-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.xml.stream:stax-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.activation:activation</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.inject:guice</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.inject:javax.inject</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey.contribs:jersey-guice</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-client</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-client</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-server-common
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.inject.extensions:guice-servlet
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>io.netty:netty</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-mapreduce-client-core
-                        </classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-beanutils:commons-beanutils</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.hamcrest:hamcrest-core</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-collections:commons-collections</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-http</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-io:commons-io</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-lang:commons-lang</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>commons-configuration:commons-configuration
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-server</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-util</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-io</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>aopalliance:aopalliance</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.beust:jcommander</classpathDependencyExcludes>
-                    </classpathDependencyExcludes>
                 </configuration>
             </plugin>
         </plugins>

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
index 5d20041..cb4c445 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
@@ -58,8 +58,6 @@ public class IgniteHadoopFileSystemCounterWriter implements HadoopCounterWriter
 
         String user = jobInfo.user();
 
-//        jobInfo.
-//
         user = IgfsUtils.fixUserName(user);
 
         String dir = jobInfo.property(COUNTER_WRITER_DIR_PROPERTY);
@@ -75,7 +73,7 @@ public class IgniteHadoopFileSystemCounterWriter implements HadoopCounterWriter
             hadoopCfg.set(MRJobConfig.USER_NAME, user);
 
             // This uses Hadoop job class loader:
-            final FileSystem fs = HadoopUtils.fileSystemForMrUser(jobStatPath.toUri(), hadoopCfg, jobId.toString());
+            final FileSystem fs = HadoopUtils.fileSystemForMrUser(jobStatPath.toUri(), hadoopCfg, jobId.toString(), null);
 
             fs.mkdirs(jobStatPath);
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java
index 6a630fb..32f0725 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopIgfsSecondaryFileSystem.java
@@ -75,7 +75,7 @@ public class IgniteHadoopIgfsSecondaryFileSystem implements IgfsSecondaryFileSys
                 }
             }
         }
-    );
+    , getClass().getClassLoader());
 
     /**
      * Simple constructor that is to be used by default.

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
index 4392e2f..4802b64 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
@@ -33,12 +33,10 @@ import org.apache.ignite.internal.util.*;
 import org.apache.ignite.internal.util.typedef.*;
 import org.apache.ignite.internal.util.typedef.internal.*;
 import org.jetbrains.annotations.*;
-import org.jsr166.*;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
-import java.util.concurrent.*;
 
 /**
  * Hadoop utility methods.
@@ -66,29 +64,17 @@ public class HadoopUtils {
     private static final String OLD_REDUCE_CLASS_ATTR = "mapred.reducer.class";
 
     /** Lazy per-user cache for the file systems. It is cleared and nulled in #close() method. */
-    private static final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fileSysLazyMap;
-
-    static {
-        fileSysLazyMap = createHadoopLazyConcurrentMap();
-
-        X.println("##### fileSysLazyMap created, HadoopUtils classloader = " + HadoopUtils.class.getClassLoader());
-        //X.println("##### fileSysLazyMap created, classloader = " + fileSysLazyMap.getClass().getClassLoader());
-        //X.println("##### " + fileSysLazyMap);
-        //int i = 0;
-       // Ignition.ignite()
-//        Ignition.addListener(new IgnitionListener() {
-//            @Override public void onStateChange(@Nullable String name, IgniteState state) {
-//                if (state == IgniteState.STOPPED) {
-//                    U.closeQuiet(new AutoCloseable() {
-//                        @Override public void close() throws Exception {
-//                            fileSysLazyMap.close();
-//                        }
-//                    });
-//                }
-//            }
-//        });
-    }
+    private static final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fileSysLazyMap
+        = createHadoopLazyConcurrentMap();
+
+    /** */
+    private static final Map<String, T2<HadoopLazyConcurrentMap<FsCacheKey,FileSystem>, Set<String>>> jobFsMap
+        = new HashMap<>(16);
 
+    /**
+     * Creates HadoopLazyConcurrentMap.
+     * @return a new HadoopLazyConcurrentMap.
+     */
     public static HadoopLazyConcurrentMap<FsCacheKey, FileSystem> createHadoopLazyConcurrentMap() {
         return new HadoopLazyConcurrentMap<>(
             new HadoopLazyConcurrentMap.ValueFactory<FsCacheKey, FileSystem>() {
@@ -115,7 +101,7 @@ public class HadoopUtils {
                     }
                 }
             }
-        );
+        , HadoopUtils.class.getClassLoader());
     }
 
     /**
@@ -429,7 +415,7 @@ public class HadoopUtils {
      * @throws IOException
      */
     public static FileSystem fileSystemForMrUser(@Nullable URI uri, Configuration cfg,
-            @Nullable String jobId) throws IOException {
+        @Nullable String jobId, @Nullable String locId) throws IOException {
         final String usr = getMrHadoopUser(cfg);
 
         assert usr != null;
@@ -439,26 +425,12 @@ public class HadoopUtils {
 
         final FileSystem fs;
 
-//        if (doCacheFs) {
-            try {
-                fs = getWithCaching(uri, cfg, usr, jobId);
-            }
-            catch (IgniteException ie) {
-                throw new IOException(ie);
-            }
-//        }
-//        else {
-//            assert false;
-//
-//            try {
-//                fs = FileSystem.get(uri, cfg, usr);
-//            }
-//            catch (InterruptedException ie) {
-//                Thread.currentThread().interrupt();
-//
-//                throw new IOException(ie);
-//            }
-//        }
+        try {
+            fs = getWithCaching(uri, cfg, usr, jobId, locId);
+        }
+        catch (IgniteException ie) {
+            throw new IOException(ie);
+        }
 
         assert fs != null;
         assert !(fs instanceof IgniteHadoopFileSystem) || F.eq(usr, ((IgniteHadoopFileSystem)fs).user());
@@ -573,51 +545,96 @@ public class HadoopUtils {
      * @param usr The user to create file system for.
      * @return The file system: either created, or taken from the cache.
      */
-    private static FileSystem getWithCaching(URI uri, Configuration cfg, String usr, @Nullable String jobId) {
+    private static FileSystem getWithCaching(URI uri, Configuration cfg, String usr,
+            @Nullable String jobId, @Nullable String locId) {
         final FsCacheKey key = new FsCacheKey(uri, usr, cfg);
 
         if (jobId == null)
             return fileSysLazyMap.getOrCreate(key);
         else {
-            HadoopLazyConcurrentMap<FsCacheKey,FileSystem> lm = getLazyMap(jobId);
+            HadoopLazyConcurrentMap<FsCacheKey,FileSystem> lm = getFsMapForJob(jobId, locId);
 
             return lm.getOrCreate(key);
         }
     }
 
-    private static HadoopLazyConcurrentMap<FsCacheKey,FileSystem> getLazyMap(String jobId) {
+    private static synchronized HadoopLazyConcurrentMap<FsCacheKey,FileSystem> getFsMapForJob(String jobId,
+        @Nullable String locId) {
         assert jobId != null;
 
-        HadoopLazyConcurrentMap<FsCacheKey,FileSystem> lm0 = m.get(jobId);
+        T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>> t2 = jobFsMap.get(jobId);
 
-        if (lm0 != null)
-            return lm0;
+        if (t2 == null) {
+            HadoopLazyConcurrentMap<FsCacheKey, FileSystem> newLM = createHadoopLazyConcurrentMap();
 
-        HadoopLazyConcurrentMap<FsCacheKey,FileSystem> newLM = createHadoopLazyConcurrentMap();
+            t2 = new T2<>(newLM, (Set<String>)new HashSet<String>());
 
-        HadoopLazyConcurrentMap<FsCacheKey,FileSystem> pushedLM = m.putIfAbsent(jobId, newLM);
+            T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>> pushedT2 = jobFsMap.put(jobId, t2);
 
-        if (pushedLM == null)
-            return newLM;
-        else {
-            try {
-                newLM.close();
-            } catch (IgniteCheckedException ice) {
-                throw new IgniteException(ice);
-            }
+            assert pushedT2 == null;
+        }
+
+        if (locId != null) {
+            // If local node Id is given, register this local Id for this job:
+            boolean added = t2.get2().add(locId);
 
-            return pushedLM;
+            // new locId appears in HadoopV2Job#initialize(), no job with the same locId should be registered:
+            assert added;
+
+            if (added)
+                X.println("#### INCREMENTED: usages: " + t2.get2().size() + ", jobId = " + jobId + ", locId = " + locId);
         }
+
+        return t2.get1();
     }
 
-    private static final ConcurrentMap<String, HadoopLazyConcurrentMap<FsCacheKey,FileSystem>> m
-        = new ConcurrentHashMap8<>();
+    public static synchronized void close(final String jobId, final String locId) throws IgniteCheckedException {
+        assert jobId != null;
+        assert locId != null;
+
+        final T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>> t2 = jobFsMap.get(jobId);
+
+        if (t2 != null) {
+            Set<String> locIds = t2.get2();
+
+            boolean rm = locIds.remove(locId);
 
-    public static void close(String jobId) throws IgniteCheckedException {
-        HadoopLazyConcurrentMap<FsCacheKey,FileSystem> lm = m.remove(jobId);
+            assert rm;
 
-        if (lm != null)
-            lm.close();
+            final int usageCount = locIds.size();
+
+            X.println("### DECREMENT: usage count == " + usageCount + ", jobId = " + jobId + ", locId = " + locId);
+
+            if (usageCount < 0) {
+                X.println("negative usage count map: " + t2.get1());
+
+                assert false;
+            }
+
+            if (usageCount == 0) {
+                T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>> removedT2 = jobFsMap.remove(jobId);
+
+                assert removedT2 == t2;
+
+                t2.get1().close();
+            }
+            else
+                X.println("### Not closing Fs since usage count == " + usageCount);
+        }
+    }
+
+    /**
+     * Diagnostic method.
+     */
+    public static synchronized void dump() {
+        System.out.println("DUMP: ##################### main map: " + fileSysLazyMap);
+        if (!jobFsMap.isEmpty()) {
+            System.out.println("##################### job map: " + jobFsMap.size());
+            for (Map.Entry<String, T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>>> e : jobFsMap.entrySet()) {
+                T2<HadoopLazyConcurrentMap<FsCacheKey, FileSystem>, Set<String>> t2 = e.getValue();
+                System.out.println("###### job = " + e.getKey() + ", cnt = " + t2.get2() + ", map = " + t2.get1());
+            }
+        }
     }
 
     /**
@@ -654,10 +671,12 @@ public class HadoopUtils {
     }
 
     /**
-     * NB: This method is called with reflection.
+     * This method is called via reflection upon job finish. It cleans up all the Fs created for tasks.
      * @throws IgniteCheckedException
      */
     public static void close() throws IgniteCheckedException {
         fileSysLazyMap.close();
+
+        dump();
     }
 }
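
The jobFsMap introduced above pairs each job id with a lazy FileSystem cache plus the set of local node ids registered for that job; close(jobId, locId) unregisters one node id and closes the cache only when the set becomes empty. The sketch below shows the same reference-counting idea in isolation. It is not part of the commit; the class and method names (PerJobCache, acquire, release) are illustrative assumptions, not Ignite API.

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import java.util.function.Supplier;

    /** Sketch: a per-job, reference-counted cache of Closeable resources. */
    public class PerJobCache<R extends Closeable> {
        /** Cached resource plus the node ids that still use it. */
        private static class CacheEntry<R> {
            final R resource;
            final Set<String> nodeIds = new HashSet<>();

            CacheEntry(R resource) {
                this.resource = resource;
            }
        }

        private final Map<String, CacheEntry<R>> jobMap = new HashMap<>();

        /** Returns the job's resource, creating it on first use and registering the node id. */
        public synchronized R acquire(String jobId, String nodeId, Supplier<R> factory) {
            CacheEntry<R> e = jobMap.computeIfAbsent(jobId, id -> new CacheEntry<R>(factory.get()));

            e.nodeIds.add(nodeId); // Register this node as a user of the job's cache.

            return e.resource;
        }

        /** Unregisters the node id; closes and removes the resource when no users remain. */
        public synchronized void release(String jobId, String nodeId) throws IOException {
            CacheEntry<R> e = jobMap.get(jobId);

            if (e == null)
                return;

            e.nodeIds.remove(nodeId);

            if (e.nodeIds.isEmpty()) {
                jobMap.remove(jobId);

                e.resource.close(); // Last user gone: close the cached resource.
            }
        }
    }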

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
index e512aa3..e693c91 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
@@ -20,6 +20,7 @@ package org.apache.ignite.internal.processors.hadoop.fs;
 import org.apache.ignite.*;
 import org.apache.ignite.internal.util.future.*;
 import org.apache.ignite.internal.util.typedef.*;
+import org.apache.ignite.internal.util.typedef.internal.*;
 import org.jsr166.*;
 
 import java.io.*;
@@ -49,16 +50,22 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
     /** Flag indicating that this map is closed and cleared. */
     private boolean closed;
 
-    final AtomicInteger createdCount = new AtomicInteger();
+    final AtomicInteger createdFsCount = new AtomicInteger();
+
+    final ClassLoader callerCL;
+
+    final int instanceNumber;
 
     /**
      * Constructor.
      * @param factory the factory to create new values lazily.
      */
-    public HadoopLazyConcurrentMap(ValueFactory<K, V> factory) {
-        int i = instances.incrementAndGet();
-        X.println("### +++ HLCM created: " + i + ", classloader = " + getClass().getClassLoader());
+    public HadoopLazyConcurrentMap(ValueFactory<K, V> factory, ClassLoader cl) {
+        instanceNumber = instances.incrementAndGet();
         this.factory = factory;
+        callerCL = cl;
+
+        assert getClass().getClassLoader() == Ignite.class.getClassLoader();
     }
 
     /**
@@ -136,15 +143,15 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
                     try {
                         v.close();
 
-                        createdCount.decrementAndGet();
+                        createdFsCount.decrementAndGet();
                     }
                     catch (Exception err0) {
                         if (err == null)
                             err = err0;
                     }
 
-                    X.println("### closed Fs: key=[" + key + "], fs cnt = " + createdCount.get() +  ", value=[" + v.getClass() + "], dump:");
-                    new Throwable().printStackTrace(System.out);
+                    X.println("### closed Fs: key=[" + key + "], fs cnt = " + createdFsCount.get() +  ", value=[" + v.getClass() + "], dump:");
+                    //new Throwable().printStackTrace(System.out);
                 }
             }
 
@@ -154,7 +161,7 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
             map.clear();
 
             int i = instances.decrementAndGet();
-            X.println("### --- HLCM closed. Fs cnt = " + createdCount.get() + ", instances=" + i + ", classloader = " + getClass().getClassLoader());
+            X.println("### --- HLCM closed: " + this);
 
             if (err != null)
                 throw new IgniteCheckedException(err);
@@ -191,9 +198,8 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
                 if (v0 == null)
                     throw new IgniteException("Failed to create non-null value. [key=" + key + ']');
 
-                int created = createdCount.incrementAndGet();
-                X.println("+ Created fs: " + created + ", fs = " + v0);
-
+                int created = createdFsCount.incrementAndGet();
+                X.println("+ Created Fs: " + created + ", fs = " + v0);
                 fut.onDone(v0);
             }
             catch (Throwable e) {
@@ -227,10 +233,23 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
         public V createValue(K key);
     }
 
-    @Override protected void finalize() throws Throwable {
-        super.finalize();
+//    @Override protected void finalize() throws Throwable {
+//        super.finalize();
+//
+//        X.print("### FINALIZE: ");
+//        close();
+//    }
+
+    @Override public String toString() {
+        return  S.toString((Class)getClass(), this)
+            + "\n, FsCount = " + createdFsCount.get()
+            + "\n, i = " + instanceNumber
+            + "\n, instances = " + instances.get()
+            + "\n, caller classloader = " + callerCL
+            + "\n, this cl = " + getClass().getClassLoader();
+    }
 
-        X.print("### FINALIZE: ");
-        close();
+    public static int getNumberOfInstances() {
+        return instances.get();
     }
 }
\ No newline at end of file
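
HadoopLazyConcurrentMap, whose diagnostics change above, guarantees that the ValueFactory runs at most once per key by publishing a future into the backing map before the value exists. A stripped-down sketch of that getOrCreate idiom using only java.util.concurrent follows; the names LazyConcurrentMap and getOrCreate mirror the pattern, but the code is illustrative, not the Ignite implementation.

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.FutureTask;
    import java.util.function.Function;

    /** Sketch: at most one value is ever created per key, even under contention. */
    public class LazyConcurrentMap<K, V> {
        private final ConcurrentMap<K, FutureTask<V>> map = new ConcurrentHashMap<>();

        private final Function<K, V> factory;

        public LazyConcurrentMap(Function<K, V> factory) {
            this.factory = factory;
        }

        public V getOrCreate(K key) {
            FutureTask<V> fut = map.get(key);

            if (fut == null) {
                FutureTask<V> newFut = new FutureTask<V>(() -> factory.apply(key));

                fut = map.putIfAbsent(key, newFut);

                if (fut == null) {
                    fut = newFut;

                    fut.run(); // This thread won the race: create the value here.
                }
            }

            try {
                return fut.get(); // Losing threads block until the winner finishes creation.
            }
            catch (InterruptedException | ExecutionException e) {
                throw new IllegalStateException("Failed to create value for key: " + key, e);
            }
        }
    }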

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
index 29cdc99..2f07817 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
@@ -967,8 +967,6 @@ public class HadoopJobTracker extends HadoopComponent {
      * @throws IgniteCheckedException If failed.
      */
     @Nullable public HadoopJob job(HadoopJobId jobId, @Nullable HadoopJobInfo jobInfo) throws IgniteCheckedException {
-        X.println("### tracker: " + this + ", jobId = " + jobId);
-
         GridFutureAdapter<HadoopJob> fut = jobs.get(jobId);
 
         if (fut != null || (fut = jobs.putIfAbsent(jobId, new GridFutureAdapter<HadoopJob>())) != null)

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
index 0ca3355..cff27eb 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
@@ -37,6 +37,7 @@ import java.lang.reflect.*;
 import java.util.*;
 import java.util.Queue;
 import java.util.concurrent.*;
+import java.util.concurrent.atomic.*;
 
 import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.*;
 
@@ -135,7 +136,7 @@ public class HadoopV2Job implements HadoopJob {
 
             try {
                 // Job classloader.
-                FileSystem fs = fileSystemForMrUser(jobDir.toUri(), jobConf, jobId.toString());
+                FileSystem fs = fileSystemForMrUser(jobDir.toUri(), jobConf, jobId.toString(), null);
 
                 JobSplit.TaskSplitMetaInfo[] metaInfos = SplitMetaInfoReader.readSplitMetaInfo(hadoopJobID, fs, jobConf,
                     jobDir);
@@ -242,15 +243,38 @@ public class HadoopV2Job implements HadoopJob {
         }
     }
 
+    private final AtomicBoolean initialized = new AtomicBoolean();
+
+    private final AtomicBoolean disposed = new AtomicBoolean();
+
     /** {@inheritDoc} */
     @Override public void initialize(boolean external, UUID locNodeId) throws IgniteCheckedException {
+        assert locNodeId != null;
         assert this.locNodeId == null;
 
+        boolean ok = initialized.compareAndSet(false, true);
+        assert ok;
+
         this.locNodeId = locNodeId;
 
         Thread.currentThread().setContextClassLoader(jobConf.getClassLoader());
 
         try {
+
+            String jobIdStr = jobId.toString();
+            assert jobIdStr != null;
+
+            String locId = this.locNodeId.toString();
+            assert locId != null;
+
+            X.println("##### initialize : " + jobIdStr + ", loc node id = " + locId);
+
+            final FileSystem fs = HadoopUtils.fileSystemForMrUser(null, jobConf, jobIdStr, locId);
+        } catch (IOException ioe) {
+            throw new IgniteCheckedException(ioe);
+        }
+
+        try {
             rsrcMgr.prepareJobEnvironment(!external, jobLocalDir(locNodeId, jobId));
         }
         finally {
@@ -261,8 +285,15 @@ public class HadoopV2Job implements HadoopJob {
     /** {@inheritDoc} */
     @SuppressWarnings("ThrowFromFinallyBlock")
     @Override public void dispose(boolean external) throws IgniteCheckedException {
-        X.println("############# dispose: jod = " + jobId + ", this = " + this);
-        X.println(" loc node id = " + locNodeId);
+        X.println("############# DISPOSE: jod = " + jobId + ", locId = " + locNodeId.toString() + ", this = " + this);
+        //X.println(" loc node id = " + locNodeId);
+        X.println(" XXXXXX this id = " + System.identityHashCode(this));
+
+        assert initialized.get();
+
+        boolean dsp = disposed.compareAndSet(false, true);
+        assert dsp;
+
         try {
             if (rsrcMgr != null && !external) {
                 File jobLocDir = jobLocalDir(locNodeId, jobId);
@@ -294,15 +325,12 @@ public class HadoopV2Job implements HadoopJob {
                     m.invoke(null);
 
                     // Also close all the FileSystems cached in
-                    // HadoopLazyConcurrentMap for this task class loader:
+                    // HadoopLazyConcurrentMap for this *task* class loader:
                     closeCachedFileSystems(ldr);
 
 
 //                    assert getClass().getClassLoader() instanceof HadoopClassLoader;
 //                    assert getClass().getClassLoader().toString().contains("hadoop-job");
-
-                    // Close all cached Fs for this Job:
-                    HadoopUtils.close(jobId.toString());
                 }
                 catch (Throwable e) {
                     if (err == null)
@@ -315,6 +343,9 @@ public class HadoopV2Job implements HadoopJob {
 
             assert fullCtxClsQueue.isEmpty();
 
+            // Close all cached Fs for this Job:
+            HadoopUtils.close(jobId.toString(), locNodeId.toString());
+
             for (int q=0; q<10; q++)
                 System.gc();
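
The initialize() and dispose() changes above use AtomicBoolean.compareAndSet to assert that each lifecycle phase of a job instance runs exactly once. A generic sketch of that guard, assuming a hypothetical OncePerInstance class rather than HadoopV2Job itself:

    import java.util.concurrent.atomic.AtomicBoolean;

    /** Sketch: a component whose lifecycle methods must each run exactly once. */
    class OncePerInstance {
        private final AtomicBoolean initialized = new AtomicBoolean();

        private final AtomicBoolean disposed = new AtomicBoolean();

        void initialize() {
            // compareAndSet returns false if another caller already flipped the flag.
            if (!initialized.compareAndSet(false, true))
                throw new IllegalStateException("Already initialized.");

            // ... acquire resources here ...
        }

        void dispose() {
            if (!initialized.get())
                throw new IllegalStateException("Not initialized.");

            if (!disposed.compareAndSet(false, true))
                throw new IllegalStateException("Already disposed.");

            // ... release resources here ...
        }
    }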
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
index 91300fe..68dd292 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
@@ -115,8 +115,8 @@ public class HadoopV2JobResourceManager {
                 stagingDir = new Path(new URI(mrDir));
 
                 if (download) {
-                    // job Cl.
-                    FileSystem fs = HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), cfg, jobId.toString());
+                    // Job class loader..
+                    FileSystem fs = HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), cfg, jobId.toString(), null);
 
                     if (!fs.exists(stagingDir))
                         throw new IgniteCheckedException("Failed to find map-reduce submission " +
@@ -211,7 +211,7 @@ public class HadoopV2JobResourceManager {
 
             FileSystem dstFs = FileSystem.getLocal(cfg);
 
-            FileSystem srcFs = HadoopUtils.fileSystemForMrUser(srcPath.toUri(), cfg, jobId.toString());
+            FileSystem srcFs = HadoopUtils.fileSystemForMrUser(srcPath.toUri(), cfg, jobId.toString(), null);
 
             if (extract) {
                 File archivesPath = new File(jobLocDir.getAbsolutePath(), ".cached-archives");
@@ -294,9 +294,8 @@ public class HadoopV2JobResourceManager {
     public void cleanupStagingDirectory() {
         try {
             if (stagingDir != null) {
-                FileSystem fs = HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), ctx.getJobConf(), jobId.toString());
-
-                System.out.println("##### Cl: " + getClass().getClassLoader());
+                FileSystem fs = HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), ctx.getJobConf(),
+                    jobId.toString(), null);
 
                 fs.delete(stagingDir, true);
             }

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
index f142aa6..3452678 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
@@ -426,9 +426,9 @@ public class HadoopV2TaskContext extends HadoopTaskContext {
         FileSystem fs;
 
         try {
-            // !! Task classloader.
-            // We also cache Fs there since
-            fs = fileSystemForMrUser(jobDir.toUri(), jobConf(), null);
+            // Task class loader.
+            // We also cache Fs there; all of them will be cleared explicitly when the job ends.
+            fs = fileSystemForMrUser(jobDir.toUri(), jobConf(), null, null);
         }
         catch (IOException e) {
             throw new IgniteCheckedException(e);

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java
index 950c303..b9f8179 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/client/hadoop/HadoopClientProtocolSelfTest.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.mapreduce.lib.input.*;
 import org.apache.hadoop.mapreduce.lib.output.*;
 import org.apache.hadoop.mapreduce.protocol.*;
 import org.apache.ignite.*;
-import org.apache.ignite.configuration.*;
 import org.apache.ignite.hadoop.mapreduce.*;
 import org.apache.ignite.igfs.*;
 import org.apache.ignite.internal.processors.hadoop.*;
@@ -78,14 +77,6 @@ public class HadoopClientProtocolSelfTest extends HadoopAbstractSelfTest {
         return true;
     }
 
-//    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
-//        IgniteConfiguration ic = super.getConfiguration(gridName);
-////
-////        ic.setLocalHost("127.0.0.1");
-////
-//        return ic;
-//    }
-
     /** {@inheritDoc} */
     @Override protected void beforeTestsStarted() throws Exception {
         super.beforeTestsStarted();

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
index 37b9253..e7c7f8a 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
@@ -21,7 +21,6 @@ import junit.framework.*;
 import org.apache.ignite.*;
 import org.apache.ignite.configuration.*;
 import org.apache.ignite.hadoop.fs.*;
-import org.apache.ignite.internal.processors.hadoop.*;
 import org.apache.ignite.internal.util.ipc.shmem.*;
 import org.apache.ignite.internal.util.typedef.*;
 import org.jetbrains.annotations.*;
@@ -38,7 +37,7 @@ public class IgfsEventsTestSuite extends TestSuite {
      * @throws Exception Thrown in case of the failure.
      */
     public static TestSuite suite() throws Exception {
-        ClassLoader ldr = TestSuite.class.getClassLoader(); //new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite FS Events Test Suite");
 
@@ -58,7 +57,7 @@ public class IgfsEventsTestSuite extends TestSuite {
      * @throws Exception Thrown in case of the failure.
      */
     public static TestSuite suiteNoarchOnly() throws Exception {
-        ClassLoader ldr = TestSuite.class.getClassLoader(); //new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite IGFS Events Test Suite Noarch Only");
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java
index a3c9bde..8d5faa9 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopAbstractSelfTest.java
@@ -22,6 +22,7 @@ import org.apache.ignite.configuration.*;
 import org.apache.ignite.igfs.*;
 import org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem;
 import org.apache.ignite.internal.processors.hadoop.fs.*;
+import org.apache.ignite.internal.util.typedef.*;
 import org.apache.ignite.spi.communication.tcp.*;
 import org.apache.ignite.spi.discovery.tcp.*;
 import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
@@ -97,6 +98,14 @@ public abstract class HadoopAbstractSelfTest extends GridCommonAbstractTest {
         System.setProperty("java.class.path", initCp);
 
         initCp = null;
+
+        int inst = HadoopLazyConcurrentMap.getNumberOfInstances();
+
+        X.println("HLCM instances: " + inst);
+
+        // TODO: harden this constraint. It looks like sometimes Job#dispose(boolean) is not called, so not all
+        // file systems are cleared:
+        assertTrue("HadoopLazyConcurrentMap instances: " + inst, inst <= 8);
     }
 
     /** {@inheritDoc} */

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java
index 5f90bd4..5eb5084 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopFileSystemsTest.java
@@ -88,31 +88,51 @@ public class HadoopFileSystemsTest extends HadoopAbstractSelfTest {
                 try {
                     int curThreadNum = threadNum.getAndIncrement();
 
-                    if ("file".equals(uri.getScheme()))
-                        FileSystem.get(uri, cfg).setWorkingDirectory(new Path("file:///user/user" + curThreadNum));
+                    final FileSystem fs0 = FileSystem.get(uri, cfg);
+
+                    FileSystem fs;
+
+                    if ("file".equals(uri.getScheme())) {
+                        fs = FileSystem.get(uri, cfg);
+                        fs.setWorkingDirectory(new Path("file:///user/user" + curThreadNum));
+                    }
 
                     changeUserPhase.countDown();
                     changeUserPhase.await();
 
-                    newUserInitWorkDir[curThreadNum] = FileSystem.get(uri, cfg).getWorkingDirectory();
+                    fs = FileSystem.get(uri, cfg);
+                    assertSame(fs, fs0);
+                    newUserInitWorkDir[curThreadNum] = fs.getWorkingDirectory();
 
-                    FileSystem.get(uri, cfg).setWorkingDirectory(new Path("folder" + curThreadNum));
+                    fs = FileSystem.get(uri, cfg);
+                    assertSame(fs, fs0);
+                    fs.setWorkingDirectory(new Path("folder" + curThreadNum));
 
                     changeDirPhase.countDown();
                     changeDirPhase.await();
 
-                    newWorkDir[curThreadNum] = FileSystem.get(uri, cfg).getWorkingDirectory();
+                    fs = FileSystem.get(uri, cfg);
+                    assertSame(fs, fs0);
+                    newWorkDir[curThreadNum] = fs.getWorkingDirectory();
 
-                    FileSystem.get(uri, cfg).setWorkingDirectory(new Path("/folder" + curThreadNum));
+                    fs = FileSystem.get(uri, cfg);
+                    assertSame(fs, fs0);
+                    fs.setWorkingDirectory(new Path("/folder" + curThreadNum));
 
                     changeAbsDirPhase.countDown();
                     changeAbsDirPhase.await();
 
-                    newAbsWorkDir[curThreadNum] = FileSystem.get(uri, cfg).getWorkingDirectory();
+                    fs = FileSystem.get(uri, cfg);
+                    assertSame(fs, fs0);
+                    newAbsWorkDir[curThreadNum] = fs.getWorkingDirectory();
 
-                    newInstanceWorkDir[curThreadNum] = FileSystem.newInstance(uri, cfg).getWorkingDirectory();
+                    fs = FileSystem.newInstance(uri, cfg);
+                    assertNotSame(fs, fs0);
+                    newInstanceWorkDir[curThreadNum] = fs.getWorkingDirectory();
 
                     finishPhase.countDown();
+
+                    fs.close();
                 }
                 catch (InterruptedException | IOException e) {
                     error("Failed to execute test thread.", e);

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
index 9e22bba..da6f9c7 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
@@ -200,21 +200,6 @@ public class HadoopMapReduceTest extends HadoopAbstractWordCountTest {
         }
     }
 
-//    @Override protected void afterTest() throws Exception {
-//        super.afterTest();
-//    }
-
-//    @Override protected void afterTestsStopped() throws Exception {
-//        super.afterTestsStopped();
-//
-//        X.println("GCing..."); // TODO
-//
-//        for (int i=0; i<250; i++) {
-//            System.gc();
-//            Thread.sleep(10);
-//        }
-//    }
-
     /**
      * Simple test job statistics.
      *

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
index c584e91..f5def91 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
@@ -50,35 +50,37 @@ public class IgniteHadoopTestSuite extends TestSuite {
         downloadHadoop();
         downloadHive();
 
-        final ClassLoader ldr = TestSuite.class.getClassLoader(); //new HadoopClassLoader(null, "test");
+        final ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite Hadoop MR Test Suite");
 
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemSecondaryModeSelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemClientSelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoggerStateSelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoggerSelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemHandshakeSelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfs20FileSystemLoopbackPrimarySelfTest.class.getName())));
-//
-//        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfsDualSyncSelfTest.class.getName())));
-//        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfsDualAsyncSelfTest.class.getName())));
-//
-//        suite.addTest(IgfsEventsTestSuite.suiteNoarchOnly());
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalPrimarySelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalSecondarySelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalDualSyncSelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackExternalDualAsyncSelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedPrimarySelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedSecondarySelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedDualSyncSelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoopbackEmbeddedDualAsyncSelfTest.class.getName())));
 
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemSecondaryModeSelfTest.class.getName())));
+
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemClientSelfTest.class.getName())));
+
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoggerStateSelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemLoggerSelfTest.class.getName())));
+
+        suite.addTest(new TestSuite(ldr.loadClass(IgniteHadoopFileSystemHandshakeSelfTest.class.getName())));
+
+        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfs20FileSystemLoopbackPrimarySelfTest.class.getName())));
+
+        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfsDualSyncSelfTest.class.getName())));
+        suite.addTest(new TestSuite(ldr.loadClass(HadoopIgfsDualAsyncSelfTest.class.getName())));
+
+        suite.addTest(IgfsEventsTestSuite.suiteNoarchOnly());
+
+
+        // ok:
         suite.addTest(new TestSuite(ldr.loadClass(HadoopFileSystemsTest.class.getName())));
 
         suite.addTest(new TestSuite(ldr.loadClass(HadoopValidationSelfTest.class.getName())));

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3df0112b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
index 5b35dd0..22beea6 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
@@ -35,7 +35,7 @@ public class IgniteIgfsLinuxAndMacOSTestSuite extends TestSuite {
     public static TestSuite suite() throws Exception {
         downloadHadoop();
 
-        ClassLoader ldr = TestSuite.class.getClassLoader(); //new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite IGFS Test Suite For Linux And Mac OS");