Posted to commits@ignite.apache.org by ak...@apache.org on 2015/06/24 04:50:18 UTC

[1/9] incubator-ignite git commit: ignite-1048 create/lock entries before topology read lock

Repository: incubator-ignite
Updated Branches:
  refs/heads/ignite-gg-10457 12821b79d -> 60c4d651f


ignite-1048 create/lock entries before topology read lock


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/be881a36
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/be881a36
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/be881a36

Branch: refs/heads/ignite-gg-10457
Commit: be881a367ef02a724c2b84aba8f72c6effadc2b2
Parents: 1c66078
Author: sboikov <sb...@gridgain.com>
Authored: Tue Jun 23 10:52:47 2015 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Tue Jun 23 10:52:47 2015 +0300

----------------------------------------------------------------------
 .../cache/distributed/dht/GridDhtLocalPartition.java        | 3 ++-
 .../cache/distributed/dht/atomic/GridDhtAtomicCache.java    | 9 ++++-----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/be881a36/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtLocalPartition.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtLocalPartition.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtLocalPartition.java
index 3a577a7..87c7f0e 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtLocalPartition.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtLocalPartition.java
@@ -236,7 +236,8 @@ public class GridDhtLocalPartition implements Comparable<GridDhtLocalPartition>,
     void onAdded(GridDhtCacheEntry entry) {
         GridDhtPartitionState state = state();
 
-        assert state != EVICTED : "Adding entry to invalid partition: " + this;
+        if (state == EVICTED)
+            throw new GridDhtInvalidPartitionException(id, "Adding entry to invalid partition [part=" + id + ']');
 
         map.put(entry.key(), entry);
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/be881a36/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
index 8630421..7bec302 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/atomic/GridDhtAtomicCache.java
@@ -1028,7 +1028,10 @@ public class GridDhtAtomicCache<K, V> extends GridDhtCacheAdapter<K, V> {
         IgniteCacheExpiryPolicy expiry = null;
 
         try {
-            List<GridDhtCacheEntry> locked = null;
+            // If batch store update is enabled, we need to lock all entries.
+            // First, need to acquire locks on cache entries, then check filter.
+            List<GridDhtCacheEntry> locked = lockEntries(keys, req.topologyVersion());
+
             Collection<IgniteBiTuple<GridDhtCacheEntry, GridCacheVersion>> deleted = null;
 
             try {
@@ -1058,10 +1061,6 @@ public class GridDhtAtomicCache<K, V> extends GridDhtCacheAdapter<K, V> {
                             return;
                         }
 
-                        // If batch store update is enabled, we need to lock all entries.
-                        // First, need to acquire locks on cache entries, then check filter.
-                        locked = lockEntries(keys, req.topologyVersion());
-
                         boolean hasNear = ctx.discovery().cacheNearNode(node, name());
 
                         GridCacheVersion ver = req.updateVersion();
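
For context, the change above reorders locking so that cache entries are created and locked before the topology read lock is taken (and GridDhtLocalPartition now signals an evicted partition with GridDhtInvalidPartitionException instead of an assertion). Below is a minimal, self-contained Java sketch of that lock ordering only; it uses plain java.util.concurrent locks and illustrative names, not the actual Ignite internals.

import java.util.*;
import java.util.concurrent.locks.*;

public class LockOrderingSketch {
    /** Stand-in for the topology read/write lock. */
    private final ReadWriteLock topLock = new ReentrantReadWriteLock();

    /** Locks the per-entry locks first, then takes the topology read lock. */
    public void updateAll(List<ReentrantLock> entryLocks) {
        // 1. Create/lock entries before the topology read lock (the ordering this commit introduces).
        for (ReentrantLock l : entryLocks)
            l.lock();

        try {
            // 2. Only then acquire the topology read lock and run the filter/update logic.
            topLock.readLock().lock();

            try {
                // ... update logic would go here ...
            }
            finally {
                topLock.readLock().unlock();
            }
        }
        finally {
            // 3. Release the entry locks after the topology read lock is released.
            for (ReentrantLock l : entryLocks)
                l.unlock();
        }
    }
}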


[8/9] incubator-ignite git commit: # ignite-sprint-7: IgniteReflectionFactory javadoc

Posted by ak...@apache.org.
# ignite-sprint-7: IgniteReflectionFactory javadoc


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/359b4314
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/359b4314
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/359b4314

Branch: refs/heads/ignite-gg-10457
Commit: 359b43144046bc683f9e28794eb7e7bc9c63ebc1
Parents: dfe95bd
Author: ashutak <as...@gridgain.com>
Authored: Tue Jun 23 16:59:44 2015 +0300
Committer: ashutak <as...@gridgain.com>
Committed: Tue Jun 23 16:59:44 2015 +0300

----------------------------------------------------------------------
 .../configuration/IgniteReflectionFactory.java  | 81 ++++++++++++++++++--
 1 file changed, 76 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/359b4314/modules/core/src/main/java/org/apache/ignite/configuration/IgniteReflectionFactory.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/configuration/IgniteReflectionFactory.java b/modules/core/src/main/java/org/apache/ignite/configuration/IgniteReflectionFactory.java
index 3222938..af73737 100644
--- a/modules/core/src/main/java/org/apache/ignite/configuration/IgniteReflectionFactory.java
+++ b/modules/core/src/main/java/org/apache/ignite/configuration/IgniteReflectionFactory.java
@@ -26,26 +26,91 @@ import java.lang.reflect.*;
 import java.util.*;
 
 /**
- * Convenience class for reflection-based object creation.
+ * Factory implementation that uses reflection to create an instance of the given class.
+ * <p>
+ * The factory supports two modes: singleton and non-singleton.
+ * <p>
+ * A class created by {@link IgniteReflectionFactory} (the component class) has to be
+ * a public Java POJO with public setters for the fields
+ * for which property injection will be used (see {@link #setProperties(Map)}).
+ * <p>
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * The following configuration parameters are mandatory:
+ * <ul>
+ * <li>Component class - the class to be created (see {@link #setComponentClass(Class)}).
+ * It has to be a public Java POJO class with a default constructor
+ * and public setters to be used for property injection (see {@link #setProperties(Map)})</li>
+ * </ul>
+ * <h2 class="header">Optional</h2>
+ * The following configuration parameters are optional:
+ * <ul>
+ * <li>Singleton mode (see {@link #setSingleton(boolean)})</li>
+ * <li>Properties map (see {@link #setProperties(Map)})</li>
+ * <li>With method (see {@link #setWithMethod(Object, String, Serializable)})</li>
+ * </ul>
+ * <h2 class="header">Java Example</h2>
+ * <pre name="code" class="java">
+ * Factory<CacheStoreSessionListener> factory =
+ *     new IgniteReflectionFactory<CacheStoreSessionListener>(MyCacheStoreSessionListener.class);
+ *
+ * CacheConfiguration cc = new CacheConfiguration()
+ *     .setCacheStoreSessionListenerFactories(factory);
+ *
+ * IgniteConfiguration cfg = new IgniteConfiguration()
+ *     .setCacheConfiguration(cc);
+ *
+ * // Start grid.
+ * Ignition.start(cfg);
+ * </pre>
+ * <h2 class="header">Spring Example</h2>
+ * IgniteReflectionFactory can be configured from Spring XML configuration file:
+ * <pre name="code" class="xml">
+ * &lt;bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration"&gt;
+ *     ...
+ *     &lt;property name="cacheConfiguration"&gt;
+ *         &lt;list&gt;
+ *             &lt;bean class="org.apache.ignite.configuration.CacheConfiguration"&gt;
+ *                 ...
+ *                 &lt;property name="cacheStoreSessionListenerFactories"&gt;
+ *                     &lt;list&gt;
+ *                         &lt;bean class="org.apache.ignite.configuration.IgniteReflectionFactory"&gt;
+ *                             &lt;property name="componentClass" value="custom.project.MyCacheStoreSessionListener"/&gt;
+ *                         &lt;/bean&gt;
+ *                     &lt;/list&gt;
+ *                 &lt;/property&gt;
+ *                 ...
+ *             &lt;/bean&gt;
+ *         &lt;/list&gt;
+ *     &lt;/property&gt;
+ *     ...
+ * &lt;/bean&gt;
+ * </pre>
+ * <p>
+ * <img src="http://ignite.incubator.apache.org/images/spring-small.png">
+ * <br>
+ * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
+ * @see Factory
  */
 public class IgniteReflectionFactory<T> implements Factory<T> {
     /** */
     private static final long serialVersionUID = 0L;
 
-    /** */
+    /** Singleton mode */
     private volatile boolean singleton;
 
-    /** */
+    /** Component class */
     private volatile Class<? extends T> cls;
 
-    /** */
+    /** Properties */
     private volatile Map<String, Serializable> props;
 
     /** */
     private transient T instance;
 
     /**
-     *
+     * Default constructor.
      */
     public IgniteReflectionFactory() {
         // No-op.
@@ -98,6 +163,9 @@ public class IgniteReflectionFactory<T> implements Factory<T> {
     }
 
     /**
+     * Gets a map of properties. The map contains entries of component class field name
+     * to the value of the field which will be used as the initial value.
+     *
      * @return Properties.
      */
     public Map<String, Serializable> getProperties() {
@@ -105,6 +173,9 @@ public class IgniteReflectionFactory<T> implements Factory<T> {
     }
 
     /**
+     * Sets a map of properties. The map contains entries of component class field name
+     * to the value of the field which will be used as the initial value.
+     *
      * @param props Properties.
      */
     public void setProperties(Map<String, Serializable> props) {
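
As a usage note (not part of the commit), here is a short snippet, in the same style as the javadoc example above, showing the properties-map injection the new javadoc describes. MyCacheStoreSessionListener, the regionName property, and its setter are hypothetical and assume the POJO exposes a matching public setter; singleton semantics are as described by setSingleton(boolean).

Map<String, Serializable> props = new HashMap<>();

props.put("regionName", "sessions"); // injected through the public setter setRegionName(String)

IgniteReflectionFactory<CacheStoreSessionListener> factory =
    new IgniteReflectionFactory<>(MyCacheStoreSessionListener.class);

factory.setSingleton(true);   // singleton mode: presumably one shared instance from create()
factory.setProperties(props); // property injection into the component class fields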


[4/9] incubator-ignite git commit: # ignite-sprint-7 minor

Posted by ak...@apache.org.
# ignite-sprint-7 minor


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/36433bc3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/36433bc3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/36433bc3

Branch: refs/heads/ignite-gg-10457
Commit: 36433bc3d96a67975ce99a0774e608ccf50c2a21
Parents: c7ba154
Author: sboikov <sb...@gridgain.com>
Authored: Tue Jun 23 13:31:34 2015 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Tue Jun 23 13:31:34 2015 +0300

----------------------------------------------------------------------
 .../org/apache/ignite/testframework/junits/GridAbstractTest.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/36433bc3/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java
index 9c42920..9a63b3a 100644
--- a/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/testframework/junits/GridAbstractTest.java
@@ -606,7 +606,7 @@ public abstract class GridAbstractTest extends TestCase {
                 Thread.sleep(1000);
         }
 
-        throw new Exception("Failed to wait for proper topology.");
+        throw new Exception("Failed to wait for proper topology: " + cnt);
     }
 
     /** */


[7/9] incubator-ignite git commit: Merge remote-tracking branch 'remotes/origin/ignite-1048' into ignite-sprint-7

Posted by ak...@apache.org.
Merge remote-tracking branch 'remotes/origin/ignite-1048' into ignite-sprint-7


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/dfe95bdf
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/dfe95bdf
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/dfe95bdf

Branch: refs/heads/ignite-gg-10457
Commit: dfe95bdfb98663134809ca456f63e100fb1e7c26
Parents: 2bb6e0f a2d2c9b
Author: sboikov <sb...@gridgain.com>
Authored: Tue Jun 23 16:38:38 2015 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Tue Jun 23 16:38:38 2015 +0300

----------------------------------------------------------------------
 .../cache/distributed/dht/GridDhtLocalPartition.java        | 3 ++-
 .../cache/distributed/dht/atomic/GridDhtAtomicCache.java    | 9 ++++-----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------



[9/9] incubator-ignite git commit: Merge branches 'ignite-gg-10457' and 'ignite-sprint-7' of https://git-wip-us.apache.org/repos/asf/incubator-ignite into ignite-gg-10457

Posted by ak...@apache.org.
Merge branches 'ignite-gg-10457' and 'ignite-sprint-7' of https://git-wip-us.apache.org/repos/asf/incubator-ignite into ignite-gg-10457


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/60c4d651
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/60c4d651
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/60c4d651

Branch: refs/heads/ignite-gg-10457
Commit: 60c4d651fb02b24d8f18b9ee85cae3735501bbb2
Parents: 12821b7 359b431
Author: AKuznetsov <ak...@gridgain.com>
Authored: Wed Jun 24 09:42:57 2015 +0700
Committer: AKuznetsov <ak...@gridgain.com>
Committed: Wed Jun 24 09:42:57 2015 +0700

----------------------------------------------------------------------
 .../configuration/IgniteReflectionFactory.java  |  81 ++++++-
 .../distributed/dht/GridDhtLocalPartition.java  |   3 +-
 .../dht/atomic/GridDhtAtomicCache.java          |   9 +-
 .../processors/hadoop/HadoopJobInfo.java        |   4 +-
 .../hadoop/counter/HadoopCounterWriter.java     |   5 +-
 .../GridCacheReplicatedFailoverSelfTest.java    |   5 +
 .../testframework/junits/GridAbstractTest.java  |   2 +-
 modules/hadoop/pom.xml                          |  78 ------
 .../fs/IgniteHadoopFileSystemCounterWriter.java |   9 +-
 .../processors/hadoop/HadoopClassLoader.java    |  29 +++
 .../processors/hadoop/HadoopDefaultJobInfo.java |  27 +--
 .../internal/processors/hadoop/HadoopUtils.java | 237 ------------------
 .../hadoop/SecondaryFileSystemProvider.java     |   3 +-
 .../hadoop/fs/HadoopFileSystemCacheUtils.java   | 241 +++++++++++++++++++
 .../hadoop/fs/HadoopFileSystemsUtils.java       |  11 +
 .../hadoop/fs/HadoopLazyConcurrentMap.java      |   5 +
 .../hadoop/jobtracker/HadoopJobTracker.java     |  25 +-
 .../child/HadoopChildProcessRunner.java         |   3 +-
 .../processors/hadoop/v2/HadoopV2Job.java       |  84 ++++++-
 .../hadoop/v2/HadoopV2JobResourceManager.java   |  22 +-
 .../hadoop/v2/HadoopV2TaskContext.java          |  37 ++-
 .../apache/ignite/igfs/IgfsEventsTestSuite.java |   5 +-
 .../processors/hadoop/HadoopMapReduceTest.java  |   2 +-
 .../processors/hadoop/HadoopTasksV1Test.java    |   7 +-
 .../processors/hadoop/HadoopTasksV2Test.java    |   7 +-
 .../processors/hadoop/HadoopV2JobSelfTest.java  |   6 +-
 .../collections/HadoopAbstractMapTest.java      |   3 +-
 .../testsuites/IgniteHadoopTestSuite.java       |   2 +-
 .../IgniteIgfsLinuxAndMacOSTestSuite.java       |   3 +-
 29 files changed, 566 insertions(+), 389 deletions(-)
----------------------------------------------------------------------



[6/9] incubator-ignite git commit: [IGNITE-980]: added missing file

Posted by ak...@apache.org.
[IGNITE-980]: added missing file


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/2bb6e0f8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/2bb6e0f8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/2bb6e0f8

Branch: refs/heads/ignite-gg-10457
Commit: 2bb6e0f887c99de70e479c48992f5a0a86cbf5ec
Parents: 3b49184
Author: iveselovskiy <iv...@gridgain.com>
Authored: Tue Jun 23 15:02:23 2015 +0300
Committer: iveselovskiy <iv...@gridgain.com>
Committed: Tue Jun 23 15:02:23 2015 +0300

----------------------------------------------------------------------
 .../hadoop/fs/HadoopFileSystemCacheUtils.java   | 241 +++++++++++++++++++
 1 file changed, 241 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/2bb6e0f8/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java
new file mode 100644
index 0000000..f94b932
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemCacheUtils.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop.fs;
+
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.mapreduce.*;
+import org.apache.ignite.*;
+import org.apache.ignite.hadoop.fs.v1.*;
+import org.apache.ignite.internal.util.*;
+import org.apache.ignite.internal.util.typedef.*;
+import org.jetbrains.annotations.*;
+
+import java.io.*;
+import java.net.*;
+
+/**
+ * File system cache utility methods used by Map-Reduce tasks and jobs.
+ */
+public class HadoopFileSystemCacheUtils {
+    /**
+     * A common static factory method. Creates new HadoopLazyConcurrentMap.
+     * @return a new HadoopLazyConcurrentMap.
+     */
+    public static HadoopLazyConcurrentMap<FsCacheKey, FileSystem> createHadoopLazyConcurrentMap() {
+        return new HadoopLazyConcurrentMap<>(
+            new HadoopLazyConcurrentMap.ValueFactory<FsCacheKey, FileSystem>() {
+                @Override public FileSystem createValue(FsCacheKey key) {
+                    try {
+                        assert key != null;
+
+                        // Explicitly disable FileSystem caching:
+                        URI uri = key.uri();
+
+                        String scheme = uri.getScheme();
+
+                        // Copy the configuration to avoid altering the external object.
+                        Configuration cfg = new Configuration(key.configuration());
+
+                        String prop = HadoopFileSystemsUtils.disableFsCachePropertyName(scheme);
+
+                        cfg.setBoolean(prop, true);
+
+                        return FileSystem.get(uri, cfg, key.user());
+                    }
+                    catch (IOException | InterruptedException ioe) {
+                        throw new IgniteException(ioe);
+                    }
+                }
+            }
+        );
+    }
+
+    /**
+     * Gets non-null user name as per the Hadoop viewpoint.
+     * @param cfg the Hadoop job configuration, may be null.
+     * @return the user name, never null.
+     */
+    private static String getMrHadoopUser(Configuration cfg) throws IOException {
+        String user = cfg.get(MRJobConfig.USER_NAME);
+
+        if (user == null)
+            user = IgniteHadoopFileSystem.getFsHadoopUser();
+
+        return user;
+    }
+
+    /**
+     * Common method to get the V1 file system in MapRed engine.
+     * It gets the filesystem for the user specified in the
+     * configuration with {@link MRJobConfig#USER_NAME} property.
+     * The file systems are created and cached in the given map upon first request.
+     *
+     * @param uri The file system uri.
+     * @param cfg The configuration.
+     * @param map The caching map.
+     * @return The file system.
+     * @throws IOException On error.
+     */
+    public static FileSystem fileSystemForMrUserWithCaching(@Nullable URI uri, Configuration cfg,
+        HadoopLazyConcurrentMap<FsCacheKey, FileSystem> map)
+            throws IOException {
+        assert map != null;
+        assert cfg != null;
+
+        final String usr = getMrHadoopUser(cfg);
+
+        assert usr != null;
+
+        if (uri == null)
+            uri = FileSystem.getDefaultUri(cfg);
+
+        final FileSystem fs;
+
+        try {
+            final FsCacheKey key = new FsCacheKey(uri, usr, cfg);
+
+            fs = map.getOrCreate(key);
+        }
+        catch (IgniteException ie) {
+            throw new IOException(ie);
+        }
+
+        assert fs != null;
+        assert !(fs instanceof IgniteHadoopFileSystem) || F.eq(usr, ((IgniteHadoopFileSystem)fs).user());
+
+        return fs;
+    }
+
+    /**
+     * Takes Fs URI using logic similar to that used in FileSystem#get(1,2,3).
+     * @param uri0 The uri.
+     * @param cfg The cfg.
+     * @return Correct URI.
+     */
+    private static URI fixUri(URI uri0, Configuration cfg) {
+        if (uri0 == null)
+            return FileSystem.getDefaultUri(cfg);
+
+        String scheme = uri0.getScheme();
+        String authority = uri0.getAuthority();
+
+        if (authority == null) {
+            URI dfltUri = FileSystem.getDefaultUri(cfg);
+
+            if (scheme == null || (scheme.equals(dfltUri.getScheme()) && dfltUri.getAuthority() != null))
+                return dfltUri;
+        }
+
+        return uri0;
+    }
+
+    /**
+     * Note that configuration is not a part of the key.
+     * It is used solely to initialize the first instance
+     * that is created for the key.
+     */
+    public static final class FsCacheKey {
+        /** */
+        private final URI uri;
+
+        /** */
+        private final String usr;
+
+        /** */
+        private final String equalityKey;
+
+        /** */
+        private final Configuration cfg;
+
+        /**
+         * Constructor
+         */
+        public FsCacheKey(URI uri, String usr, Configuration cfg) {
+            assert uri != null;
+            assert usr != null;
+            assert cfg != null;
+
+            this.uri = fixUri(uri, cfg);
+            this.usr = usr;
+            this.cfg = cfg;
+
+            this.equalityKey = createEqualityKey();
+        }
+
+        /**
+         * Creates String key used for equality and hashing.
+         */
+        private String createEqualityKey() {
+            GridStringBuilder sb = new GridStringBuilder("(").a(usr).a(")@");
+
+            if (uri.getScheme() != null)
+                sb.a(uri.getScheme().toLowerCase());
+
+            sb.a("://");
+
+            if (uri.getAuthority() != null)
+                sb.a(uri.getAuthority().toLowerCase());
+
+            return sb.toString();
+        }
+
+        /**
+         * The URI.
+         */
+        public URI uri() {
+            return uri;
+        }
+
+        /**
+         * The User.
+         */
+        public String user() {
+            return usr;
+        }
+
+        /**
+         * The Configuration.
+         */
+        public Configuration configuration() {
+            return cfg;
+        }
+
+        /** {@inheritDoc} */
+        @SuppressWarnings("SimplifiableIfStatement")
+        @Override public boolean equals(Object obj) {
+            if (obj == this)
+                return true;
+
+            if (obj == null || getClass() != obj.getClass())
+                return false;
+
+            return equalityKey.equals(((FsCacheKey)obj).equalityKey);
+        }
+
+        /** {@inheritDoc} */
+        @Override public int hashCode() {
+            return equalityKey.hashCode();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String toString() {
+            return equalityKey;
+        }
+    }
+}
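
A brief usage sketch for the new utility class (not part of the commit): the caller — presumably HadoopV2Job, per the related commit in this series — owns the lazy cache map and passes it in on each lookup. Variable names are illustrative, the usual org.apache.hadoop.* and org.apache.ignite.internal.processors.hadoop.fs.* imports are assumed, and the call may throw IOException; a null URI falls back to the default file-system URI.

HadoopLazyConcurrentMap<HadoopFileSystemCacheUtils.FsCacheKey, FileSystem> fsMap =
    HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap();

Configuration cfg = new Configuration();

// Resolves the user from MRJobConfig.USER_NAME (or the Ignite default) and caches
// the FileSystem keyed by {scheme, authority, user}; the Configuration only
// initializes the first instance created for a key.
FileSystem fs = HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching(null, cfg, fsMap);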


[3/9] incubator-ignite git commit: Merge remote-tracking branch 'remotes/origin/ignite-sprint-7' into ignite-1048

Posted by ak...@apache.org.
Merge remote-tracking branch 'remotes/origin/ignite-sprint-7' into ignite-1048


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/a2d2c9b9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/a2d2c9b9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/a2d2c9b9

Branch: refs/heads/ignite-gg-10457
Commit: a2d2c9b96e9bd86f2f711df5da835c95af57b612
Parents: be881a3 c7ba154
Author: sboikov <sb...@gridgain.com>
Authored: Tue Jun 23 12:56:44 2015 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Tue Jun 23 12:56:44 2015 +0300

----------------------------------------------------------------------
 .../replicated/GridCacheReplicatedFailoverSelfTest.java         | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------



[2/9] incubator-ignite git commit: # ignite-sprint-7 disabled hanging test

Posted by ak...@apache.org.
# ignite-sprint-7 disabled hanging test


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/c7ba1541
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/c7ba1541
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/c7ba1541

Branch: refs/heads/ignite-gg-10457
Commit: c7ba1541959b78451e81fb701550894b09fe6eb9
Parents: 1c66078
Author: sboikov <sb...@gridgain.com>
Authored: Tue Jun 23 12:55:17 2015 +0300
Committer: sboikov <sb...@gridgain.com>
Committed: Tue Jun 23 12:55:17 2015 +0300

----------------------------------------------------------------------
 .../replicated/GridCacheReplicatedFailoverSelfTest.java         | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c7ba1541/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/GridCacheReplicatedFailoverSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/GridCacheReplicatedFailoverSelfTest.java b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/GridCacheReplicatedFailoverSelfTest.java
index 3461dd4..4269ea6 100644
--- a/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/GridCacheReplicatedFailoverSelfTest.java
+++ b/modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/replicated/GridCacheReplicatedFailoverSelfTest.java
@@ -28,6 +28,11 @@ import static org.apache.ignite.cache.CacheMode.*;
  */
 public class GridCacheReplicatedFailoverSelfTest extends GridCacheAbstractFailoverTxSelfTest {
     /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        fail("https://issues.apache.org/jira/browse/IGNITE-882");
+    }
+
+    /** {@inheritDoc} */
     @Override protected CacheMode cacheMode() {
         return REPLICATED;
     }


[5/9] incubator-ignite git commit: [IGNITE-980]: Investigate if we should close() the FileSystems in MapRed task implementations.

Posted by ak...@apache.org.
[IGNITE-980]: Investigate if we should close() the FileSystems in MapRed task implementations.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/3b49184b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/3b49184b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/3b49184b

Branch: refs/heads/ignite-gg-10457
Commit: 3b49184b48ca55d5cf4dcdd7485ab8aba1bb9cff
Parents: 36433bc
Author: iveselovskiy <iv...@gridgain.com>
Authored: Tue Jun 23 13:58:06 2015 +0300
Committer: iveselovskiy <iv...@gridgain.com>
Committed: Tue Jun 23 13:58:06 2015 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopJobInfo.java        |   4 +-
 .../hadoop/counter/HadoopCounterWriter.java     |   5 +-
 modules/hadoop/pom.xml                          |  78 ------
 .../fs/IgniteHadoopFileSystemCounterWriter.java |   9 +-
 .../processors/hadoop/HadoopClassLoader.java    |  29 +++
 .../processors/hadoop/HadoopDefaultJobInfo.java |  27 +--
 .../internal/processors/hadoop/HadoopUtils.java | 237 -------------------
 .../hadoop/SecondaryFileSystemProvider.java     |   3 +-
 .../hadoop/fs/HadoopFileSystemsUtils.java       |  11 +
 .../hadoop/fs/HadoopLazyConcurrentMap.java      |   5 +
 .../hadoop/jobtracker/HadoopJobTracker.java     |  25 +-
 .../child/HadoopChildProcessRunner.java         |   3 +-
 .../processors/hadoop/v2/HadoopV2Job.java       |  84 ++++++-
 .../hadoop/v2/HadoopV2JobResourceManager.java   |  22 +-
 .../hadoop/v2/HadoopV2TaskContext.java          |  37 ++-
 .../apache/ignite/igfs/IgfsEventsTestSuite.java |   5 +-
 .../processors/hadoop/HadoopMapReduceTest.java  |   2 +-
 .../processors/hadoop/HadoopTasksV1Test.java    |   7 +-
 .../processors/hadoop/HadoopTasksV2Test.java    |   7 +-
 .../processors/hadoop/HadoopV2JobSelfTest.java  |   6 +-
 .../collections/HadoopAbstractMapTest.java      |   3 +-
 .../testsuites/IgniteHadoopTestSuite.java       |   2 +-
 .../IgniteIgfsLinuxAndMacOSTestSuite.java       |   3 +-
 23 files changed, 237 insertions(+), 377 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
index 51faf5d..d3735d5 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopJobInfo.java
@@ -55,12 +55,14 @@ public interface HadoopJobInfo extends Serializable {
      * This method will be called once for the same ID on one node, though it can be called on the same host
      * multiple times from different processes (in case of multiple nodes on the same host or external execution).
      *
+     * @param jobCls The job class.
      * @param jobId Job ID.
      * @param log Logger.
      * @return Job.
      * @throws IgniteCheckedException If failed.
      */
-    HadoopJob createJob(HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException;
+    public HadoopJob createJob(Class<? extends HadoopJob> jobCls,
+        HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException;
 
     /**
      * @return Number of reducers configured for job.

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterWriter.java
----------------------------------------------------------------------
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterWriter.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterWriter.java
index ce67c57..f21a1e6 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterWriter.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterWriter.java
@@ -28,10 +28,9 @@ public interface HadoopCounterWriter {
     /**
      * Writes counters of given job to some statistics storage.
      *
-     * @param jobInfo Job info.
-     * @param jobId Job id.
+     * @param job The job.
      * @param cntrs Counters.
      * @throws IgniteCheckedException If failed.
      */
-    public void write(HadoopJobInfo jobInfo, HadoopJobId jobId, HadoopCounters cntrs) throws IgniteCheckedException;
+    public void write(HadoopJob job, HadoopCounters cntrs) throws IgniteCheckedException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/modules/hadoop/pom.xml b/modules/hadoop/pom.xml
index 637f097..e300ba0 100644
--- a/modules/hadoop/pom.xml
+++ b/modules/hadoop/pom.xml
@@ -143,84 +143,6 @@
                 <artifactId>maven-surefire-plugin</artifactId>
                 <version>2.17</version>
                 <configuration>
-                    <classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-annotations</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-auth</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-codec:commons-codec</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.httpcomponents:httpclient</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.httpcomponents:httpcore</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-common</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.guava:guava</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-cli:commons-cli</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.commons:commons-math3</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>xmlenc:xmlenc</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-httpclient:commons-httpclient</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-net:commons-net</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.servlet:servlet-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.mortbay.jetty:jetty</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.mortbay.jetty:jetty-util</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-core</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-json</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jettison:jettison</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.xml.bind:jaxb-impl</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-jaxrs</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-xc</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-server</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>asm:asm</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>tomcat:jasper-compiler</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>tomcat:jasper-runtime</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.servlet.jsp:jsp-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-el:commons-el</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-logging:commons-logging</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>net.java.dev.jets3t:jets3t</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.jamesmurty.utils:java-xmlbuilder</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-core-asl</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.codehaus.jackson:jackson-mapper-asl
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.avro:avro</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.thoughtworks.paranamer:paranamer</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.xerial.snappy:snappy-java</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.protobuf:protobuf-java</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.jcraft:jsch</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.code.findbugs:jsr305</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.zookeeper:zookeeper</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>org.apache.commons:commons-compress</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.tukaani:xz</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-hdfs</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-daemon:commons-daemon</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-mapreduce-client-common
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-common</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.xml.bind:jaxb-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.xml.stream:stax-api</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.activation:activation</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.inject:guice</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>javax.inject:javax.inject</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey.contribs:jersey-guice</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-client</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.sun.jersey:jersey-client</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-yarn-server-common
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.google.inject.extensions:guice-servlet
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>io.netty:netty</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.apache.hadoop:hadoop-mapreduce-client-core
-                        </classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-beanutils:commons-beanutils</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.hamcrest:hamcrest-core</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-collections:commons-collections</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-http</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>commons-io:commons-io</classpathDependencyExcludes>
-                        <!--<classpathDependencyExcludes>commons-lang:commons-lang</classpathDependencyExcludes>-->
-                        <classpathDependencyExcludes>commons-configuration:commons-configuration
-                        </classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-server</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-util</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>org.eclipse.jetty:jetty-io</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>aopalliance:aopalliance</classpathDependencyExcludes>
-                        <classpathDependencyExcludes>com.beust:jcommander</classpathDependencyExcludes>
-                    </classpathDependencyExcludes>
                 </configuration>
             </plugin>
         </plugins>

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
index d910507..f76b354 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/IgniteHadoopFileSystemCounterWriter.java
@@ -25,6 +25,7 @@ import org.apache.ignite.*;
 import org.apache.ignite.internal.processors.hadoop.*;
 import org.apache.ignite.internal.processors.hadoop.counter.*;
 import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters;
+import org.apache.ignite.internal.processors.hadoop.v2.*;
 import org.apache.ignite.internal.processors.igfs.*;
 import org.apache.ignite.internal.util.typedef.*;
 
@@ -48,11 +49,15 @@ public class IgniteHadoopFileSystemCounterWriter implements HadoopCounterWriter
     private static final String DEFAULT_COUNTER_WRITER_DIR = "/user/" + USER_MACRO;
 
     /** {@inheritDoc} */
-    @Override public void write(HadoopJobInfo jobInfo, HadoopJobId jobId, HadoopCounters cntrs)
+    @Override public void write(HadoopJob job, HadoopCounters cntrs)
         throws IgniteCheckedException {
 
         Configuration hadoopCfg = HadoopUtils.safeCreateConfiguration();
 
+        final HadoopJobInfo jobInfo = job.info();
+
+        final HadoopJobId jobId = job.id();
+
         for (Map.Entry<String, String> e : ((HadoopDefaultJobInfo)jobInfo).properties().entrySet())
             hadoopCfg.set(e.getKey(), e.getValue());
 
@@ -72,7 +77,7 @@ public class IgniteHadoopFileSystemCounterWriter implements HadoopCounterWriter
         try {
             hadoopCfg.set(MRJobConfig.USER_NAME, user);
 
-            FileSystem fs = HadoopUtils.fileSystemForMrUser(jobStatPath.toUri(), hadoopCfg, true);
+            FileSystem fs = ((HadoopV2Job)job).fileSystem(jobStatPath.toUri(), hadoopCfg);
 
             fs.mkdirs(jobStatPath);
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index eb98ff9..0988fe0 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -67,6 +67,28 @@ public class HadoopClassLoader extends URLClassLoader {
     private final String name;
 
     /**
+     * Gets the name for the job class loader. The name is specific to the local node id.
+     * @param locNodeId The local node id.
+     * @return The class loader name.
+     */
+    public static String nameForJob(UUID locNodeId) {
+        return "hadoop-job-node-" + locNodeId.toString();
+    }
+
+    /**
+     * Gets the name for the task class loader.
+     * @param info The task info.
+     * @param prefix If {@code true}, return only the name prefix (without task type and number).
+     * @return The class loader name.
+     */
+    public static String nameForTask(HadoopTaskInfo info, boolean prefix) {
+        if (prefix)
+            return "hadoop-task-" + info.jobId() + "-";
+        else
+            return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber();
+    }
+
+    /**
      * @param urls Urls.
      */
     public HadoopClassLoader(URL[] urls, String name) {
@@ -568,4 +590,11 @@ public class HadoopClassLoader extends URLClassLoader {
     @Override public String toString() {
         return S.toString(HadoopClassLoader.class, this);
     }
+
+    /**
+     * Getter for name field.
+     */
+    public String name() {
+        return name;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
index 2e855d0..95e03c8 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopDefaultJobInfo.java
@@ -18,7 +18,6 @@
 package org.apache.ignite.internal.processors.hadoop;
 
 import org.apache.ignite.*;
-import org.apache.ignite.internal.processors.hadoop.v2.*;
 import org.apache.ignite.internal.util.typedef.internal.*;
 import org.jetbrains.annotations.*;
 
@@ -48,9 +47,6 @@ public class HadoopDefaultJobInfo implements HadoopJobInfo, Externalizable {
     /** User name. */
     private String user;
 
-    /** */
-    private static volatile Class<?> jobCls;
-
     /**
      * Default constructor required by {@link Externalizable}.
      */
@@ -82,24 +78,15 @@ public class HadoopDefaultJobInfo implements HadoopJobInfo, Externalizable {
     }
 
     /** {@inheritDoc} */
-    @Override public HadoopJob createJob(HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException {
-        try {
-            Class<?> jobCls0 = jobCls;
+    @Override public HadoopJob createJob(Class<? extends HadoopJob> jobCls,
+            HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException {
+        assert jobCls != null;
 
-            if (jobCls0 == null) { // It is enough to have only one class loader with only Hadoop classes.
-                synchronized (HadoopDefaultJobInfo.class) {
-                    if ((jobCls0 = jobCls) == null) {
-                        HadoopClassLoader ldr = new HadoopClassLoader(null, "hadoop-job");
-
-                        jobCls = jobCls0 = ldr.loadClass(HadoopV2Job.class.getName());
-                    }
-                }
-            }
-
-            Constructor<?> constructor = jobCls0.getConstructor(HadoopJobId.class, HadoopDefaultJobInfo.class,
-                IgniteLogger.class);
+        try {
+            Constructor<? extends HadoopJob> constructor = jobCls.getConstructor(HadoopJobId.class,
+                HadoopDefaultJobInfo.class, IgniteLogger.class);
 
-            return (HadoopJob)constructor.newInstance(jobId, this, log);
+            return constructor.newInstance(jobId, this, log);
         }
         // NB: java.lang.NoClassDefFoundError may be thrown from Class#getConstructor() call.
         catch (Throwable t) {
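
For context, a hedged sketch of how a caller might use the new createJob signature together with the HadoopClassLoader helpers added above. The exact call site that now supplies jobCls is not shown in this mail, and locNodeId, jobInfo, jobId and log stand for values the caller already has.

try {
    // Dedicated Hadoop class loader, named per local node id via the new helper.
    HadoopClassLoader ldr = new HadoopClassLoader(null, HadoopClassLoader.nameForJob(locNodeId));

    // Load HadoopV2Job in that class loader and hand the class to the job info (unchecked cast).
    Class<? extends HadoopJob> jobCls =
        (Class<? extends HadoopJob>)ldr.loadClass(HadoopV2Job.class.getName());

    HadoopJob job = jobInfo.createJob(jobCls, jobId, log);
}
catch (ClassNotFoundException | IgniteCheckedException e) {
    // Handle class-loading or job-creation failure.
}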

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
index 68a9ef6..f87e610 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopUtils.java
@@ -26,16 +26,10 @@ import org.apache.hadoop.mapreduce.JobPriority;
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.ignite.*;
-import org.apache.ignite.hadoop.fs.v1.*;
-import org.apache.ignite.internal.processors.hadoop.fs.*;
 import org.apache.ignite.internal.processors.hadoop.v2.*;
-import org.apache.ignite.internal.util.*;
-import org.apache.ignite.internal.util.typedef.*;
 import org.apache.ignite.internal.util.typedef.internal.*;
-import org.jetbrains.annotations.*;
 
 import java.io.*;
-import java.net.*;
 import java.util.*;
 
 /**
@@ -63,34 +57,6 @@ public class HadoopUtils {
     /** Old reducer class attribute. */
     private static final String OLD_REDUCE_CLASS_ATTR = "mapred.reducer.class";
 
-    /** Lazy per-user cache for the file systems. It is cleared and nulled in #close() method. */
-    private static final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fileSysLazyMap = new HadoopLazyConcurrentMap<>(
-        new HadoopLazyConcurrentMap.ValueFactory<FsCacheKey, FileSystem>() {
-            @Override public FileSystem createValue(FsCacheKey key) {
-                try {
-                    assert key != null;
-
-                    // Explicitly disable FileSystem caching:
-                    URI uri = key.uri();
-
-                    String scheme = uri.getScheme();
-
-                    // Copy the configuration to avoid altering the external object.
-                    Configuration cfg = new Configuration(key.configuration());
-
-                    String prop = HadoopUtils.disableFsCachePropertyName(scheme);
-
-                    cfg.setBoolean(prop, true);
-
-                    return FileSystem.get(uri, cfg, key.user());
-                }
-                catch (IOException | InterruptedException ioe) {
-                    throw new IgniteException(ioe);
-                }
-            }
-        }
-    );
-
     /**
      * Constructor.
      */
@@ -378,207 +344,4 @@ public class HadoopUtils {
         }
     }
 
-    /**
-     * Gets non-null user name as per the Hadoop viewpoint.
-     * @param cfg the Hadoop job configuration, may be null.
-     * @return the user name, never null.
-     */
-    private static String getMrHadoopUser(Configuration cfg) throws IOException {
-        String user = cfg.get(MRJobConfig.USER_NAME);
-
-        if (user == null)
-            user = IgniteHadoopFileSystem.getFsHadoopUser();
-
-        return user;
-    }
-
-    /**
-     * Common method to get the V1 file system in MapRed engine.
-     * It creates the filesystem for the user specified in the
-     * configuration with {@link MRJobConfig#USER_NAME} property.
-     * @param uri the file system uri.
-     * @param cfg the configuration.
-     * @return the file system
-     * @throws IOException
-     */
-    public static FileSystem fileSystemForMrUser(@Nullable URI uri, Configuration cfg, boolean doCacheFs) throws IOException {
-        final String usr = getMrHadoopUser(cfg);
-
-        assert usr != null;
-
-        if (uri == null)
-            uri = FileSystem.getDefaultUri(cfg);
-
-        final FileSystem fs;
-
-        if (doCacheFs) {
-            try {
-                fs = getWithCaching(uri, cfg, usr);
-            }
-            catch (IgniteException ie) {
-                throw new IOException(ie);
-            }
-        }
-        else {
-            try {
-                fs = FileSystem.get(uri, cfg, usr);
-            }
-            catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-
-                throw new IOException(ie);
-            }
-        }
-
-        assert fs != null;
-        assert !(fs instanceof IgniteHadoopFileSystem) || F.eq(usr, ((IgniteHadoopFileSystem)fs).user());
-
-        return fs;
-    }
-
-    /**
-     * Note that configuration is not a part of the key.
-     * It is used solely to initialize the first instance
-     * that is created for the key.
-     */
-    public static final class FsCacheKey {
-        /** */
-        private final URI uri;
-
-        /** */
-        private final String usr;
-
-        /** */
-        private final String equalityKey;
-
-        /** */
-        private final Configuration cfg;
-
-        /**
-         * Constructor
-         */
-        public FsCacheKey(URI uri, String usr, Configuration cfg) {
-            assert uri != null;
-            assert usr != null;
-            assert cfg != null;
-
-            this.uri = fixUri(uri, cfg);
-            this.usr = usr;
-            this.cfg = cfg;
-
-            this.equalityKey = createEqualityKey();
-        }
-
-        /**
-         * Creates String key used for equality and hashing.
-         */
-        private String createEqualityKey() {
-            GridStringBuilder sb = new GridStringBuilder("(").a(usr).a(")@");
-
-            if (uri.getScheme() != null)
-                sb.a(uri.getScheme().toLowerCase());
-
-            sb.a("://");
-
-            if (uri.getAuthority() != null)
-                sb.a(uri.getAuthority().toLowerCase());
-
-            return sb.toString();
-        }
-
-        /**
-         * The URI.
-         */
-        public URI uri() {
-            return uri;
-        }
-
-        /**
-         * The User.
-         */
-        public String user() {
-            return usr;
-        }
-
-        /**
-         * The Configuration.
-         */
-        public Configuration configuration() {
-            return cfg;
-        }
-
-        /** {@inheritDoc} */
-        @SuppressWarnings("SimplifiableIfStatement")
-        @Override public boolean equals(Object obj) {
-            if (obj == this)
-                return true;
-
-            if (obj == null || getClass() != obj.getClass())
-                return false;
-
-            return equalityKey.equals(((FsCacheKey)obj).equalityKey);
-        }
-
-        /** {@inheritDoc} */
-        @Override public int hashCode() {
-            return equalityKey.hashCode();
-        }
-
-        /** {@inheritDoc} */
-        @Override public String toString() {
-            return equalityKey;
-        }
-    }
-
-    /**
-     * Gets FileSystem caching it in static Ignite cache. The cache is a singleton
-     * for each class loader.
-     *
-     * <p/>Note that the file systems in the cache are keyed by a triplet {scheme, authority, user}.
-     * The Configuration is not a part of the key. This means that for the given key file system is
-     * initialized only once with the Configuration passed in upon the file system creation.
-     *
-     * @param uri The file system URI.
-     * @param cfg The configuration.
-     * @param usr The user to create file system for.
-     * @return The file system: either created, or taken from the cache.
-     */
-    private static FileSystem getWithCaching(URI uri, Configuration cfg, String usr) {
-        FsCacheKey key = new FsCacheKey(uri, usr, cfg);
-
-        return fileSysLazyMap.getOrCreate(key);
-    }
-
-    /**
-     * Gets the property name to disable file system cache.
-     * @param scheme The file system URI scheme.
-     * @return The property name. If scheme is null,
-     * returns "fs.null.impl.disable.cache".
-     */
-    public static String disableFsCachePropertyName(@Nullable String scheme) {
-        return String.format("fs.%s.impl.disable.cache", scheme);
-    }
-
-    /**
-     * Takes Fs URI using logic similar to that used in FileSystem#get(1,2,3).
-     * @param uri0 The uri.
-     * @param cfg The cfg.
-     * @return Correct URI.
-     */
-    public static URI fixUri(URI uri0, Configuration cfg) {
-        if (uri0 == null)
-            return FileSystem.getDefaultUri(cfg);
-
-        String scheme = uri0.getScheme();
-        String authority = uri0.getAuthority();
-
-        if (authority == null) {
-            URI dfltUri = FileSystem.getDefaultUri(cfg);
-
-            if (scheme == null || (scheme.equals(dfltUri.getScheme()) && dfltUri.getAuthority() != null))
-                return dfltUri;
-        }
-
-        return uri0;
-    }
 }

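The FsCacheKey removed above keys cached file systems by the {scheme, authority, user} triplet: createEqualityKey() folds the user, scheme and authority into one string and deliberately ignores the URI path, so URIs that differ only in path (or in authority case) share a cached FileSystem. A minimal standalone sketch of that key format, using only the JDK (the class and method names below are illustrative, not part of the commit):

import java.net.URI;

/** Illustration only: mirrors the "(user)@scheme://authority" key built by createEqualityKey() above. */
public class FsCacheKeyFormatDemo {
    /** Same logic as FsCacheKey.createEqualityKey(): the URI path is deliberately ignored. */
    static String equalityKey(URI uri, String usr) {
        StringBuilder sb = new StringBuilder("(").append(usr).append(")@");

        if (uri.getScheme() != null)
            sb.append(uri.getScheme().toLowerCase());

        sb.append("://");

        if (uri.getAuthority() != null)
            sb.append(uri.getAuthority().toLowerCase());

        return sb.toString();
    }

    public static void main(String[] args) {
        String k1 = equalityKey(URI.create("hdfs://NameNode:9000/user/a"), "mruser");
        String k2 = equalityKey(URI.create("hdfs://namenode:9000/tmp/b"), "mruser");

        // Both keys are "(mruser)@hdfs://namenode:9000", so they map to the same cached file system.
        System.out.println(k1.equals(k2));
    }
}
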
http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/SecondaryFileSystemProvider.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/SecondaryFileSystemProvider.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/SecondaryFileSystemProvider.java
index dd679de..ef04b0f 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/SecondaryFileSystemProvider.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/SecondaryFileSystemProvider.java
@@ -20,6 +20,7 @@ package org.apache.ignite.internal.processors.hadoop;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.security.*;
+import org.apache.ignite.internal.processors.hadoop.fs.*;
 import org.apache.ignite.internal.processors.igfs.*;
 import org.apache.ignite.internal.util.*;
 import org.apache.ignite.internal.util.typedef.internal.*;
@@ -76,7 +77,7 @@ public class SecondaryFileSystemProvider {
         }
 
         // Disable caching:
-        String prop = HadoopUtils.disableFsCachePropertyName(uri.getScheme());
+        String prop = HadoopFileSystemsUtils.disableFsCachePropertyName(uri.getScheme());
 
         cfg.setBoolean(prop, true);
     }

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java
index d90bc28..382bbd0 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopFileSystemsUtils.java
@@ -19,6 +19,7 @@ package org.apache.ignite.internal.processors.hadoop.fs;
 
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.fs.*;
+import org.jetbrains.annotations.*;
 
 /**
  * Utilities for configuring file systems to support the separate working directory per each thread.
@@ -37,4 +38,14 @@ public class HadoopFileSystemsUtils {
         cfg.set("fs.AbstractFileSystem." + FsConstants.LOCAL_FS_URI.getScheme() + ".impl",
                 HadoopLocalFileSystemV2.class.getName());
     }
+
+    /**
+     * Gets the property name to disable file system cache.
+     * @param scheme The file system URI scheme.
+     * @return The property name. If scheme is null,
+     * returns "fs.null.impl.disable.cache".
+     */
+    public static String disableFsCachePropertyName(@Nullable String scheme) {
+        return String.format("fs.%s.impl.disable.cache", scheme);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
index 71b38c4..c7565d3 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/fs/HadoopLazyConcurrentMap.java
@@ -51,6 +51,8 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
      */
     public HadoopLazyConcurrentMap(ValueFactory<K, V> factory) {
         this.factory = factory;
+
+        assert getClass().getClassLoader() == Ignite.class.getClassLoader();
     }
 
     /**
@@ -105,6 +107,9 @@ public class HadoopLazyConcurrentMap<K, V extends Closeable> {
         closeLock.writeLock().lock();
 
         try {
+            if (closed)
+                return;
+
             closed = true;
 
             Exception err = null;

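The two additions above make HadoopLazyConcurrentMap safer to use from several owners: the constructor now asserts the class was loaded by the same class loader as Ignite itself, and close() returns immediately if the map is already closed, so repeated close() calls become no-ops. That matters because, after this commit, both a job-level and a task-level file system cache are closed explicitly (see the HadoopV2Job and HadoopV2TaskContext changes below). A much-simplified standalone sketch of the idempotent-close pattern, not the actual Ignite class (it uses computeIfAbsent instead of the real future-based getOrCreate and stops at the first close error):

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;

/** Sketch of a lazy map of closeable values with an idempotent close(). */
class LazyCloseableMap<K, V extends Closeable> {
    private final ConcurrentMap<K, V> map = new ConcurrentHashMap<>();
    private final ReadWriteLock closeLock = new ReentrantReadWriteLock();
    private final Function<K, V> factory;
    private boolean closed;

    LazyCloseableMap(Function<K, V> factory) {
        this.factory = factory;
    }

    /** Creates the value for the key on first access, returns the cached one afterwards. */
    V getOrCreate(K key) {
        closeLock.readLock().lock();

        try {
            if (closed)
                throw new IllegalStateException("Map is already closed.");

            return map.computeIfAbsent(key, factory);
        }
        finally {
            closeLock.readLock().unlock();
        }
    }

    /** Closes all cached values once; repeated calls are no-ops, as in the change above. */
    void close() throws IOException {
        closeLock.writeLock().lock();

        try {
            if (closed)
                return;

            closed = true;

            for (V v : map.values())
                v.close();

            map.clear();
        }
        finally {
            closeLock.writeLock().unlock();
        }
    }
}
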
http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
index 2f07817..194ae33 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java
@@ -28,6 +28,7 @@ import org.apache.ignite.internal.processors.hadoop.counter.*;
 import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters;
 import org.apache.ignite.internal.processors.hadoop.taskexecutor.*;
 import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.*;
+import org.apache.ignite.internal.processors.hadoop.v2.*;
 import org.apache.ignite.internal.util.*;
 import org.apache.ignite.internal.util.future.*;
 import org.apache.ignite.internal.util.typedef.*;
@@ -82,6 +83,9 @@ public class HadoopJobTracker extends HadoopComponent {
     /** Component busy lock. */
     private GridSpinReadWriteLock busyLock;
 
+    /** Class to create HadoopJob instances from. */
+    private Class<? extends HadoopJob> jobCls;
+
     /** Closure to check result of async transform of system cache. */
     private final IgniteInClosure<IgniteInternalFuture<?>> failsLog = new CI1<IgniteInternalFuture<?>>() {
         @Override public void apply(IgniteInternalFuture<?> gridFut) {
@@ -95,12 +99,27 @@ public class HadoopJobTracker extends HadoopComponent {
     };
 
     /** {@inheritDoc} */
-    @Override public void start(HadoopContext ctx) throws IgniteCheckedException {
+    @SuppressWarnings("unchecked")
+    @Override public void start(final HadoopContext ctx) throws IgniteCheckedException {
         super.start(ctx);
 
         busyLock = new GridSpinReadWriteLock();
 
         evtProcSvc = Executors.newFixedThreadPool(1);
+
+        UUID nodeId = ctx.localNodeId();
+
+        assert jobCls == null;
+
+        HadoopClassLoader ldr = new HadoopClassLoader(null, HadoopClassLoader.nameForJob(nodeId));
+
+        try {
+            jobCls = (Class<HadoopV2Job>)ldr.loadClass(HadoopV2Job.class.getName());
+        }
+        catch (Exception ioe) {
+            throw new IgniteCheckedException("Failed to load job class [class="
+                + HadoopV2Job.class.getName() + ']', ioe);
+        }
     }
 
     /**
@@ -838,7 +857,7 @@ public class HadoopJobTracker extends HadoopComponent {
 
                             HadoopCounters cntrs = meta.counters();
 
-                            writer.write(job.info(), jobId, cntrs);
+                            writer.write(job, cntrs);
                         }
                     }
                     catch (Exception e) {
@@ -986,7 +1005,7 @@ public class HadoopJobTracker extends HadoopComponent {
                 jobInfo = meta.jobInfo();
             }
 
-            job = jobInfo.createJob(jobId, log);
+            job = jobInfo.createJob(jobCls, jobId, log);
 
             job.initialize(false, ctx.localNodeId());
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
index 040552a..b0b0b8c 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/taskexecutor/external/child/HadoopChildProcessRunner.java
@@ -25,6 +25,7 @@ import org.apache.ignite.internal.processors.hadoop.shuffle.*;
 import org.apache.ignite.internal.processors.hadoop.taskexecutor.*;
 import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.*;
 import org.apache.ignite.internal.processors.hadoop.taskexecutor.external.communication.*;
+import org.apache.ignite.internal.processors.hadoop.v2.*;
 import org.apache.ignite.internal.util.future.*;
 import org.apache.ignite.internal.util.lang.*;
 import org.apache.ignite.internal.util.offheap.unsafe.*;
@@ -115,7 +116,7 @@ public class HadoopChildProcessRunner {
 
                 assert job == null;
 
-                job = req.jobInfo().createJob(req.jobId(), log);
+                job = req.jobInfo().createJob(HadoopV2Job.class, req.jobId(), log);
 
                 job.initialize(true, nodeDesc.processId());
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
index d754039..33a218d 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2Job.java
@@ -17,6 +17,7 @@
 
 package org.apache.ignite.internal.processors.hadoop.v2;
 
+import org.apache.hadoop.conf.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
@@ -30,15 +31,18 @@ import org.apache.ignite.internal.processors.hadoop.v1.*;
 import org.apache.ignite.internal.util.future.*;
 import org.apache.ignite.internal.util.typedef.*;
 import org.apache.ignite.internal.util.typedef.internal.*;
+import org.jetbrains.annotations.*;
 import org.jsr166.*;
 
 import java.io.*;
 import java.lang.reflect.*;
+import java.net.*;
 import java.util.*;
 import java.util.Queue;
 import java.util.concurrent.*;
 
 import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.*;
+import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.*;
 
 /**
  * Hadoop job implementation for v2 API.
@@ -70,7 +74,10 @@ public class HadoopV2Job implements HadoopJob {
     private final Queue<Class<? extends HadoopTaskContext>> taskCtxClsPool = new ConcurrentLinkedQueue<>();
 
     /** All created contexts. */
-    private final Queue<Class<?>> fullCtxClsQueue = new ConcurrentLinkedDeque<>();
+    private final Queue<Class<? extends HadoopTaskContext>> fullCtxClsQueue = new ConcurrentLinkedDeque<>();
+
+    /** File system cache map. */
+    private final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fsMap = createHadoopLazyConcurrentMap();
 
     /** Local node ID */
     private volatile UUID locNodeId;
@@ -103,7 +110,7 @@ public class HadoopV2Job implements HadoopJob {
 
         jobCtx = new JobContextImpl(jobConf, hadoopJobID);
 
-        rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log);
+        rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log, this);
     }
 
     /** {@inheritDoc} */
@@ -134,7 +141,7 @@ public class HadoopV2Job implements HadoopJob {
             Path jobDir = new Path(jobDirPath);
 
             try {
-                FileSystem fs = fileSystemForMrUser(jobDir.toUri(), jobConf, true);
+                FileSystem fs = fileSystem(jobDir.toUri(), jobConf);
 
                 JobSplit.TaskSplitMetaInfo[] metaInfos = SplitMetaInfoReader.readSplitMetaInfo(hadoopJobID, fs, jobConf,
                     jobDir);
@@ -180,6 +187,7 @@ public class HadoopV2Job implements HadoopJob {
     }
 
     /** {@inheritDoc} */
+    @SuppressWarnings("unchecked")
     @Override public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
         T2<HadoopTaskType, Integer> locTaskId = new T2<>(info.type(),  info.taskNumber());
 
@@ -201,7 +209,7 @@ public class HadoopV2Job implements HadoopJob {
                 // Note that the classloader identified by the task it was initially created for,
                 // but later it may be reused for other tasks.
                 HadoopClassLoader ldr = new HadoopClassLoader(rsrcMgr.classPath(),
-                    "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + info.taskNumber());
+                    HadoopClassLoader.nameForTask(info, false));
 
                 cls = (Class<? extends HadoopTaskContext>)ldr.loadClass(HadoopV2TaskContext.class.getName());
 
@@ -243,8 +251,13 @@ public class HadoopV2Job implements HadoopJob {
 
     /** {@inheritDoc} */
     @Override public void initialize(boolean external, UUID locNodeId) throws IgniteCheckedException {
+        assert locNodeId != null;
+
         this.locNodeId = locNodeId;
 
+        assert ((HadoopClassLoader)getClass().getClassLoader()).name()
+            .equals(HadoopClassLoader.nameForJob(this.locNodeId));
+
         Thread.currentThread().setContextClassLoader(jobConf.getClassLoader());
 
         try {
@@ -274,17 +287,26 @@ public class HadoopV2Job implements HadoopJob {
             // Stop the daemon threads that have been created
             // with the task class loaders:
             while (true) {
-                Class<?> cls = fullCtxClsQueue.poll();
+                Class<? extends HadoopTaskContext> cls = fullCtxClsQueue.poll();
 
                 if (cls == null)
                     break;
 
                 try {
-                    Class<?> daemonCls = cls.getClassLoader().loadClass(HadoopClassLoader.HADOOP_DAEMON_CLASS_NAME);
+                    final ClassLoader ldr = cls.getClassLoader();
 
-                    Method m = daemonCls.getMethod("dequeueAndStopAll");
+                    try {
+                        // Stop Hadoop daemons for this *task*:
+                        stopHadoopFsDaemons(ldr);
+                    }
+                    catch (Exception e) {
+                        if (err == null)
+                            err = e;
+                    }
 
-                    m.invoke(null);
+                    // Also close all the FileSystems cached in
+                    // HadoopLazyConcurrentMap for this *task* class loader:
+                    closeCachedTaskFileSystems(ldr);
                 }
                 catch (Throwable e) {
                     if (err == null)
@@ -297,11 +319,46 @@ public class HadoopV2Job implements HadoopJob {
 
             assert fullCtxClsQueue.isEmpty();
 
+            try {
+                // Close all cached file systems for this *Job*:
+                fsMap.close();
+            }
+            catch (Exception e) {
+                if (err == null)
+                    err = e;
+            }
+
             if (err != null)
                 throw U.cast(err);
         }
     }
 
+    /**
+     * Stops Hadoop Fs daemon threads.
+     * @param ldr The task ClassLoader to stop the daemons for.
+     * @throws Exception On error.
+     */
+    private void stopHadoopFsDaemons(ClassLoader ldr) throws Exception {
+        Class<?> daemonCls = ldr.loadClass(HadoopClassLoader.HADOOP_DAEMON_CLASS_NAME);
+
+        Method m = daemonCls.getMethod("dequeueAndStopAll");
+
+        m.invoke(null);
+    }
+
+    /**
+     * Closes all the file systems used by the task.
+     * @param ldr The task class loader.
+     * @throws Exception On error.
+     */
+    private void closeCachedTaskFileSystems(ClassLoader ldr) throws Exception {
+        Class<?> clazz = ldr.loadClass(HadoopV2TaskContext.class.getName());
+
+        Method m = clazz.getMethod("close");
+
+        m.invoke(null);
+    }
+
     /** {@inheritDoc} */
     @Override public void prepareTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
         rsrcMgr.prepareTaskWorkDir(taskLocalDir(locNodeId, info));
@@ -331,4 +388,15 @@ public class HadoopV2Job implements HadoopJob {
     public JobConf jobConf() {
         return jobConf;
     }
+
+    /**
+     * Gets file system for this job.
+     * @param uri The uri.
+     * @param cfg The configuration.
+     * @return The file system.
+     * @throws IOException On error.
+     */
+    public FileSystem fileSystem(@Nullable URI uri, Configuration cfg) throws IOException {
+        return fileSystemForMrUserWithCaching(uri, cfg, fsMap);
+    }
 }

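With the per-job fsMap and the fileSystem(uri, cfg) accessor added above, code running on behalf of a job now obtains Hadoop file systems through the job rather than through the static HadoopUtils.fileSystemForMrUser() helper it replaces, and never closes them individually: the job closes every cached file system once via fsMap.close() (see the hunk above). A hypothetical caller-side sketch of that pattern, mirroring the HadoopV2JobResourceManager changes in the next diff (the helper class and method names are illustrative and assume the internal Ignite classes are on the classpath):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job;

/** Illustrative helper only; not part of the commit. */
class JobFileSystemUsageSketch {
    /** Checks a staging directory through the job-level file system cache. */
    static boolean stagingDirExists(HadoopV2Job job, Path stagingDir, Configuration cfg) throws IOException {
        // The FileSystem is created (or reused) inside the job's fsMap cache.
        FileSystem fs = job.fileSystem(stagingDir.toUri(), cfg);

        // No fs.close() here: the job closes all of its cached file systems at once via fsMap.close().
        return fs.exists(stagingDir);
    }
}
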
http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
index 2f64e77..912cc3f 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2JobResourceManager.java
@@ -39,9 +39,10 @@ import java.util.*;
  * Provides all the resources needed for the job execution. Downloads the main jar, the configuration and additional
  * files that need to be placed on the local file system.
  */
-public class HadoopV2JobResourceManager {
+class HadoopV2JobResourceManager {
     /** File type Fs disable caching property name. */
-    private static final String FILE_DISABLE_CACHING_PROPERTY_NAME = HadoopUtils.disableFsCachePropertyName("file");
+    private static final String FILE_DISABLE_CACHING_PROPERTY_NAME =
+        HadoopFileSystemsUtils.disableFsCachePropertyName("file");
 
     /** Hadoop job context. */
     private final JobContextImpl ctx;
@@ -61,16 +62,20 @@ public class HadoopV2JobResourceManager {
     /** Staging directory to delivery job jar and config to the work nodes. */
     private Path stagingDir;
 
+    /** The job. */
+    private final HadoopV2Job job;
+
     /**
      * Creates new instance.
      * @param jobId Job ID.
      * @param ctx Hadoop job context.
      * @param log Logger.
      */
-    public HadoopV2JobResourceManager(HadoopJobId jobId, JobContextImpl ctx, IgniteLogger log) {
+    public HadoopV2JobResourceManager(HadoopJobId jobId, JobContextImpl ctx, IgniteLogger log, HadoopV2Job job) {
         this.jobId = jobId;
         this.ctx = ctx;
         this.log = log.getLogger(HadoopV2JobResourceManager.class);
+        this.job = job;
     }
 
     /**
@@ -115,7 +120,7 @@ public class HadoopV2JobResourceManager {
                 stagingDir = new Path(new URI(mrDir));
 
                 if (download) {
-                    FileSystem fs = HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), cfg, true);
+                    FileSystem fs = job.fileSystem(stagingDir.toUri(), cfg);
 
                     if (!fs.exists(stagingDir))
                         throw new IgniteCheckedException("Failed to find map-reduce submission " +
@@ -210,7 +215,7 @@ public class HadoopV2JobResourceManager {
 
             FileSystem dstFs = FileSystem.getLocal(cfg);
 
-            FileSystem srcFs = HadoopUtils.fileSystemForMrUser(srcPath.toUri(), cfg, true);
+            FileSystem srcFs = job.fileSystem(srcPath.toUri(), cfg);
 
             if (extract) {
                 File archivesPath = new File(jobLocDir.getAbsolutePath(), ".cached-archives");
@@ -292,8 +297,11 @@ public class HadoopV2JobResourceManager {
      */
     public void cleanupStagingDirectory() {
         try {
-            if (stagingDir != null)
-                HadoopUtils.fileSystemForMrUser(stagingDir.toUri(), ctx.getJobConf(), true).delete(stagingDir, true);
+            if (stagingDir != null) {
+                FileSystem fs = job.fileSystem(stagingDir.toUri(), ctx.getJobConf());
+
+                fs.delete(stagingDir, true);
+            }
         }
         catch (Exception e) {
             log.error("Failed to remove job staging directory [path=" + stagingDir + ", jobId=" + jobId + ']' , e);

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
index e89feba..6eff475 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/v2/HadoopV2TaskContext.java
@@ -33,6 +33,7 @@ import org.apache.ignite.*;
 import org.apache.ignite.internal.processors.hadoop.*;
 import org.apache.ignite.internal.processors.hadoop.counter.*;
 import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters;
+import org.apache.ignite.internal.processors.hadoop.fs.*;
 import org.apache.ignite.internal.processors.hadoop.v1.*;
 import org.apache.ignite.internal.processors.igfs.*;
 import org.apache.ignite.internal.util.typedef.*;
@@ -44,6 +45,7 @@ import java.security.*;
 import java.util.*;
 import java.util.concurrent.*;
 
+import static org.apache.ignite.internal.processors.hadoop.fs.HadoopFileSystemCacheUtils.*;
 import static org.apache.ignite.internal.processors.hadoop.fs.HadoopParameters.*;
 import static org.apache.ignite.internal.processors.hadoop.HadoopUtils.*;
 
@@ -54,6 +56,22 @@ public class HadoopV2TaskContext extends HadoopTaskContext {
     /** */
     private static final boolean COMBINE_KEY_GROUPING_SUPPORTED;
 
+    /** Lazy per-user file system cache used by the Hadoop task. */
+    private static final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fsMap
+        = createHadoopLazyConcurrentMap();
+
+    /**
+     * This method is called via reflection upon job finish with the class loader of each task.
+     * This cleans up all the file systems created for a specific task.
+     * Each class loader uses its own instance of <code>fsMap</code> since the class loaders
+     * are different.
+     *
+     * @throws IgniteCheckedException On error.
+     */
+    public static void close() throws IgniteCheckedException {
+        fsMap.close();
+    }
+
     /**
      * Check for combiner grouping support (available since Hadoop 2.3).
      */
@@ -91,7 +109,7 @@ public class HadoopV2TaskContext extends HadoopTaskContext {
     private volatile HadoopTask task;
 
     /** Local node ID */
-    private UUID locNodeId;
+    private final UUID locNodeId;
 
     /** Counters for task. */
     private final HadoopCounters cntrs = new HadoopCountersImpl();
@@ -423,7 +441,22 @@ public class HadoopV2TaskContext extends HadoopTaskContext {
     private Object readExternalSplit(HadoopExternalSplit split) throws IgniteCheckedException {
         Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR));
 
-        try (FileSystem fs = fileSystemForMrUser(jobDir.toUri(), jobConf(), false);
+        FileSystem fs;
+
+        try {
+            // This assertion uses .startsWith() instead of .equals() because task class loaders may
+            // be reused between tasks of the same job.
+            assert ((HadoopClassLoader)getClass().getClassLoader()).name()
+                .startsWith(HadoopClassLoader.nameForTask(taskInfo(), true));
+
+            // We also cache file systems here; all of them will be cleared explicitly upon the job end.
+            fs = fileSystemForMrUserWithCaching(jobDir.toUri(), jobConf(), fsMap);
+        }
+        catch (IOException e) {
+            throw new IgniteCheckedException(e);
+        }
+
+        try (
             FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) {
 
             in.seek(split.offset());

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
index fb21e2d..e7c7f8a 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/igfs/IgfsEventsTestSuite.java
@@ -21,7 +21,6 @@ import junit.framework.*;
 import org.apache.ignite.*;
 import org.apache.ignite.configuration.*;
 import org.apache.ignite.hadoop.fs.*;
-import org.apache.ignite.internal.processors.hadoop.*;
 import org.apache.ignite.internal.util.ipc.shmem.*;
 import org.apache.ignite.internal.util.typedef.*;
 import org.jetbrains.annotations.*;
@@ -38,7 +37,7 @@ public class IgfsEventsTestSuite extends TestSuite {
      * @throws Exception Thrown in case of the failure.
      */
     public static TestSuite suite() throws Exception {
-        HadoopClassLoader ldr = new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite FS Events Test Suite");
 
@@ -58,7 +57,7 @@ public class IgfsEventsTestSuite extends TestSuite {
      * @throws Exception Thrown in case of the failure.
      */
     public static TestSuite suiteNoarchOnly() throws Exception {
-        HadoopClassLoader ldr = new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite IGFS Events Test Suite Noarch Only");
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
index 66c14b5..da6f9c7 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopMapReduceTest.java
@@ -272,7 +272,7 @@ public class HadoopMapReduceTest extends HadoopAbstractWordCountTest {
             @Override public boolean apply() {
                 return igfs.exists(statPath);
             }
-        }, 10000);
+        }, 20_000);
 
         final long apiEvtCnt0 = apiEvtCnt;
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java
index 48e83cc..f59be19 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV1Test.java
@@ -19,6 +19,7 @@ package org.apache.ignite.internal.processors.hadoop;
 
 import org.apache.hadoop.mapred.*;
 import org.apache.ignite.internal.processors.hadoop.examples.*;
+import org.apache.ignite.internal.processors.hadoop.v2.*;
 
 import java.io.*;
 import java.util.*;
@@ -44,9 +45,11 @@ public class HadoopTasksV1Test extends HadoopTasksAllVersionsTest {
 
         HadoopDefaultJobInfo jobInfo = createJobInfo(jobConf);
 
-        HadoopJobId jobId = new HadoopJobId(new UUID(0, 0), 0);
+        UUID uuid = new UUID(0, 0);
 
-        return jobInfo.createJob(jobId, log);
+        HadoopJobId jobId = new HadoopJobId(uuid, 0);
+
+        return jobInfo.createJob(HadoopV2Job.class, jobId, log);
     }
 
     /** {@inheritDoc} */

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java
index e73fae3..1570807 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTasksV2Test.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.*;
 import org.apache.hadoop.mapreduce.lib.output.*;
 import org.apache.ignite.internal.processors.hadoop.examples.*;
+import org.apache.ignite.internal.processors.hadoop.v2.*;
 
 import java.util.*;
 
@@ -62,9 +63,11 @@ public class HadoopTasksV2Test extends HadoopTasksAllVersionsTest {
 
         HadoopDefaultJobInfo jobInfo = createJobInfo(hadoopJob.getConfiguration());
 
-        HadoopJobId jobId = new HadoopJobId(new UUID(0, 0), 0);
+        UUID uuid = new UUID(0, 0);
 
-        return jobInfo.createJob(jobId, log);
+        HadoopJobId jobId = new HadoopJobId(uuid, 0);
+
+        return jobInfo.createJob(HadoopV2Job.class, jobId, log);
     }
 
     /** {@inheritDoc} */

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java
index f3b9307..b8f62e6 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopV2JobSelfTest.java
@@ -68,9 +68,11 @@ public class HadoopV2JobSelfTest extends HadoopAbstractSelfTest {
 
         HadoopDefaultJobInfo info = createJobInfo(cfg);
 
-        HadoopJobId id = new HadoopJobId(UUID.randomUUID(), 1);
+        final UUID uuid = UUID.randomUUID();
 
-        HadoopJob job = info.createJob(id, log);
+        HadoopJobId id = new HadoopJobId(uuid, 1);
+
+        HadoopJob job = info.createJob(HadoopV2Job.class, id, log);
 
         HadoopTaskContext taskCtx = job.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0,
             null));

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java
index 9395c5e..b5e2ab5 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/shuffle/collections/HadoopAbstractMapTest.java
@@ -136,7 +136,8 @@ public abstract class HadoopAbstractMapTest extends GridCommonAbstractTest {
         }
 
         /** {@inheritDoc} */
-        @Override public HadoopJob createJob(HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException {
+        @Override public HadoopJob createJob(Class<? extends HadoopJob> jobCls,
+                HadoopJobId jobId, IgniteLogger log) throws IgniteCheckedException {
             assert false;
 
             return null;

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
index 4be5d72..2ab3e8c 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
@@ -50,7 +50,7 @@ public class IgniteHadoopTestSuite extends TestSuite {
         downloadHadoop();
         downloadHive();
 
-        HadoopClassLoader ldr = new HadoopClassLoader(null, "test");
+        final ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite Hadoop MR Test Suite");
 

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/3b49184b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
index 8982d83..22beea6 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteIgfsLinuxAndMacOSTestSuite.java
@@ -20,7 +20,6 @@ package org.apache.ignite.testsuites;
 import junit.framework.*;
 import org.apache.ignite.igfs.*;
 import org.apache.ignite.internal.processors.igfs.*;
-import org.apache.ignite.internal.processors.hadoop.*;
 
 import static org.apache.ignite.testsuites.IgniteHadoopTestSuite.*;
 
@@ -36,7 +35,7 @@ public class IgniteIgfsLinuxAndMacOSTestSuite extends TestSuite {
     public static TestSuite suite() throws Exception {
         downloadHadoop();
 
-        HadoopClassLoader ldr = new HadoopClassLoader(null, "test");
+        ClassLoader ldr = TestSuite.class.getClassLoader();
 
         TestSuite suite = new TestSuite("Ignite IGFS Test Suite For Linux And Mac OS");