Posted to commits@ignite.apache.org by ak...@apache.org on 2016/09/16 08:44:31 UTC

[5/9] ignite git commit: IGNITE-3906: Hadoop: implemented additional user libs facility.

IGNITE-3906: Hadoop: implemented additional user libs facility.
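
The new facility reads arbitrary additional dependencies from the HADOOP_USER_LIBS
system property or environment variable. The value follows standard Java classpath
resolution: entries are separated by the platform path separator, and each entry is
either an exact file path or a directory with a trailing "*" wildcard; entries that
do not resolve to existing files are silently skipped. A hypothetical value on a
Unix-like system might look like:

    HADOOP_USER_LIBS=/opt/my-libs/extra.jar:/opt/shared-libs/*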


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/0dc9713a
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/0dc9713a
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/0dc9713a

Branch: refs/heads/master
Commit: 0dc9713ac3124c0ea34d949271a813c992873656
Parents: 3a94f27
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Thu Sep 15 14:01:19 2016 +0300
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Thu Sep 15 14:01:19 2016 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopClassLoader.java    |   6 +-
 .../processors/hadoop/HadoopClasspathMain.java  |   2 +-
 .../processors/hadoop/HadoopClasspathUtils.java | 230 +++++++++++++---
 .../processors/hadoop/HadoopTestUtils.java      |  73 +++++-
 .../hadoop/HadoopUserLibsSelfTest.java          | 260 +++++++++++++++++++
 .../testsuites/IgniteHadoopTestSuite.java       |   3 +
 6 files changed, 536 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 389de8c..2e0e271 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -61,10 +61,8 @@ import org.objectweb.asm.commons.RemappingClassAdapter;
  * unavailable for parent.
  */
 public class HadoopClassLoader extends URLClassLoader implements ClassCache {
-    /**
-     * We are very parallel capable.
-     */
     static {
+        // We are very parallel capable.
         registerAsParallelCapable();
     }
 
@@ -498,7 +496,7 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
                 return hadoopUrls;
 
             try {
-                hadoopUrls = HadoopClasspathUtils.classpathUrls();
+                hadoopUrls = HadoopClasspathUtils.classpathForClassLoader();
             }
             catch (IOException e) {
                 throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e);

http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
index 5279b7d..4069496 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
@@ -36,7 +36,7 @@ public class HadoopClasspathMain {
 
         StringBuilder sb = new StringBuilder();
 
-        for (String path : HadoopClasspathUtils.classpathForJavaProcess())
+        for (String path : HadoopClasspathUtils.classpathForProcess())
             sb.append(path).append(separator);
 
         System.out.println(sb);

http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
index 121fcab..f5c2814 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -27,6 +27,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.LinkedList;
 import java.util.List;
 
 /**
@@ -36,18 +37,21 @@ public class HadoopClasspathUtils {
     /** Prefix directory. */
     public static final String PREFIX = "HADOOP_PREFIX";
 
-    /** Home directory. */
+    /** Hadoop home directory. */
     public static final String HOME = "HADOOP_HOME";
 
-    /** Home directory. */
+    /** Hadoop common directory. */
     public static final String COMMON_HOME = "HADOOP_COMMON_HOME";
 
-    /** Home directory. */
+    /** Hadoop HDFS directory. */
     public static final String HDFS_HOME = "HADOOP_HDFS_HOME";
 
-    /** Home directory. */
+    /** Hadoop mapred directory. */
     public static final String MAPRED_HOME = "HADOOP_MAPRED_HOME";
 
+    /** Arbitrary additional dependencies. Compliant with standard Java classpath resolution. */
+    public static final String HADOOP_USER_LIBS = "HADOOP_USER_LIBS";
+
     /** Empty string. */
     private static final String EMPTY_STR = "";
 
@@ -57,16 +61,20 @@ public class HadoopClasspathUtils {
      * @return List of the class path elements.
      * @throws IOException If failed.
      */
-    public static List<String> classpathForJavaProcess() throws IOException {
+    public static List<String> classpathForProcess() throws IOException {
         List<String> res = new ArrayList<>();
 
         for (final SearchDirectory dir : classpathDirectories()) {
-            if (dir.hasFilter()) {
-                for (File file : dir.files())
+            File[] files = dir.files();
+
+            if (dir.useWildcard()) {
+                if (files.length > 0)
+                    res.add(dir.absolutePath() + File.separator + '*');
+            }
+            else {
+                for (File file : files)
                     res.add(file.getAbsolutePath());
             }
-            else
-                res.add(dir.absolutePath() + File.separator + '*');
         }
 
         return res;
@@ -78,7 +86,7 @@ public class HadoopClasspathUtils {
      * @return List of class path URLs.
      * @throws IOException If failed.
      */
-    public static List<URL> classpathUrls() throws IOException {
+    public static List<URL> classpathForClassLoader() throws IOException {
         List<URL> res = new ArrayList<>();
 
         for (SearchDirectory dir : classpathDirectories()) {
@@ -182,17 +190,70 @@ public class HadoopClasspathUtils {
 
         Collection<SearchDirectory> res = new ArrayList<>();
 
-        res.add(new SearchDirectory(new File(loc.common(), "lib"), null));
-        res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), null));
-        res.add(new SearchDirectory(new File(loc.mapred(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-common-")));
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-auth-")));
 
-        res.add(new SearchDirectory(new File(loc.common()), "hadoop-common-"));
-        res.add(new SearchDirectory(new File(loc.common()), "hadoop-auth-"));
+        res.add(new SearchDirectory(new File(loc.hdfs()), new PrefixDirectoryFilter("hadoop-hdfs-")));
 
-        res.add(new SearchDirectory(new File(loc.hdfs()), "hadoop-hdfs-"));
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-common")));
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-core")));
 
-        res.add(new SearchDirectory(new File(loc.mapred()), "hadoop-mapreduce-client-common"));
-        res.add(new SearchDirectory(new File(loc.mapred()), "hadoop-mapreduce-client-core"));
+        res.addAll(parseUserLibs());
+
+        return res;
+    }
+
+    /**
+     * Parse user libs.
+     *
+     * @return Parsed libs search patterns.
+     * @throws IOException If failed.
+     */
+    static Collection<SearchDirectory> parseUserLibs() throws IOException {
+        return parseUserLibs(systemOrEnv(HADOOP_USER_LIBS, null));
+    }
+
+    /**
+     * Parse user libs.
+     *
+     * @param str String.
+     * @return Result.
+     * @throws IOException If failed.
+     */
+    static Collection<SearchDirectory> parseUserLibs(String str) throws IOException {
+        Collection<SearchDirectory> res = new LinkedList<>();
+
+        if (!isEmpty(str)) {
+            String[] tokens = normalize(str).split(File.pathSeparator);
+
+            for (String token : tokens) {
+                // Skip empty tokens.
+                if (isEmpty(token))
+                    continue;
+
+                File file = new File(token);
+                File dir = file.getParentFile();
+
+                if (token.endsWith("*")) {
+                    assert dir != null;
+
+                    res.add(new SearchDirectory(dir, AcceptAllDirectoryFilter.INSTANCE, false));
+                }
+                else {
+                    // Root path like "/" or "C:\" has no parent directory, so there is nothing to search; skip it.
+                    if (dir == null)
+                        continue;
+
+                    res.add(new SearchDirectory(dir, new ExactDirectoryFilter(file.getName()), false));
+                }
+            }
+        }
 
         return res;
     }
@@ -239,57 +300,162 @@ public class HadoopClasspathUtils {
     }
 
     /**
+     * Normalize the string.
+     *
+     * @param str String.
+     * @return Normalized string.
+     */
+    private static String normalize(String str) {
+        assert str != null;
+
+        return str.trim().toLowerCase();
+    }
+
+    /**
      * Simple pair-like structure to hold directory name and a mask assigned to it.
      */
-    private static class SearchDirectory {
+    static class SearchDirectory {
         /** File. */
         private final File dir;
 
-        /** The mask. */
-        private final String filter;
+        /** Filter. */
+        private final DirectoryFilter filter;
+
+        /** Whether directory must exist. */
+        private final boolean strict;
+
+        /**
+         * Constructor for directory search with strict rule.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         * @throws IOException If failed.
+         */
+        private SearchDirectory(File dir, DirectoryFilter filter) throws IOException {
+            this(dir, filter, true);
+        }
 
         /**
          * Constructor.
          *
          * @param dir Directory.
          * @param filter Filter.
+         * @param strict Whether directory must exist.
+         * @throws IOException If failed.
          */
-        private SearchDirectory(File dir, String filter) throws IOException {
+        private SearchDirectory(File dir, DirectoryFilter filter, boolean strict) throws IOException {
             this.dir = dir;
             this.filter = filter;
+            this.strict = strict;
 
-            if (!exists(dir.getAbsolutePath()))
+            if (strict && !exists(dir.getAbsolutePath()))
                 throw new IOException("Directory cannot be read: " + dir.getAbsolutePath());
         }
 
         /**
          * @return Absolute path.
          */
-        private String absolutePath() {
+        String absolutePath() {
             return dir.getAbsolutePath();
         }
 
         /**
          * @return Child files.
          */
-        private File[] files() throws IOException {
+        File[] files() throws IOException {
             File[] files = dir.listFiles(new FilenameFilter() {
                 @Override public boolean accept(File dir, String name) {
-                    return filter == null || name.startsWith(filter);
+                    return filter.test(name);
                 }
             });
 
-            if (files == null)
-                throw new IOException("Path is not a directory. [dir=" + dir + ']');
+            if (files == null) {
+                if (strict)
+                    throw new IOException("Failed to get directory files [dir=" + dir + ']');
+                else
+                    return new File[0];
+            }
+            else
+                return files;
+        }
 
-            return files;
+        /**
+         * @return {@code True} if wildcard can be used.
+         */
+        boolean useWildcard() {
+            return filter instanceof AcceptAllDirectoryFilter;
         }
+    }
 
+    /**
+     * Directory filter interface.
+     */
+    static interface DirectoryFilter {
         /**
-         * @return {@code True} if filter exists.
+         * Test if file with this name should be included.
+         *
+         * @param name File name.
+         * @return {@code True} if passed.
          */
-        private boolean hasFilter() {
-            return filter != null;
+        public boolean test(String name);
+    }
+
+    /**
+     * Filter to accept all files.
+     */
+    static class AcceptAllDirectoryFilter implements DirectoryFilter {
+        /** Singleton instance. */
+        public static final AcceptAllDirectoryFilter INSTANCE = new AcceptAllDirectoryFilter();
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return true;
+        }
+    }
+
+    /**
+     * Filter which uses prefix to filter files.
+     */
+    static class PrefixDirectoryFilter implements DirectoryFilter {
+        /** Prefix. */
+        private final String prefix;
+
+        /**
+         * Constructor.
+         *
+         * @param prefix Prefix.
+         */
+        public PrefixDirectoryFilter(String prefix) {
+            assert prefix != null;
+
+            this.prefix = normalize(prefix);
+        }
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).startsWith(prefix);
+        }
+    }
+
+    /**
+     * Filter which uses exact comparison.
+     */
+    static class ExactDirectoryFilter implements DirectoryFilter {
+        /** Name. */
+        private final String name;
+
+        /**
+         * Constructor.
+         *
+         * @param name Name.
+         */
+        public ExactDirectoryFilter(String name) {
+            this.name = normalize(name);
+        }
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).equals(this.name);
         }
     }
 }
\ No newline at end of file
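
For illustration, here is a minimal sketch (not part of the commit) of how the new
user libs parsing resolves a value. It assumes a class placed in the
org.apache.ignite.internal.processors.hadoop package, since parseUserLibs() and
SearchDirectory are package-private; the class name and paths are hypothetical.

package org.apache.ignite.internal.processors.hadoop;

import java.io.File;
import java.io.IOException;
import java.util.Collection;

/** Hypothetical sketch: expand a user libs string into concrete files. */
public class HadoopUserLibsExample {
    public static void main(String[] args) throws IOException {
        // One exact JAR plus one wildcard directory, separated by the platform path separator.
        String libs = "/opt/my-libs/extra.jar" + File.pathSeparator + "/opt/shared-libs/*";

        // Exact tokens get an ExactDirectoryFilter, wildcard tokens an AcceptAllDirectoryFilter.
        // Both are created non-strict, so a missing directory yields no files
        // rather than an IOException.
        Collection<HadoopClasspathUtils.SearchDirectory> dirs =
            HadoopClasspathUtils.parseUserLibs(libs);

        for (HadoopClasspathUtils.SearchDirectory dir : dirs) {
            for (File file : dir.files())
                System.out.println(file.getAbsolutePath());
        }
    }
}

This mirrors the pattern the new HadoopUserLibsSelfTest uses further down.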

http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java
index 9ebad78..da0d922 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopTestUtils.java
@@ -17,14 +17,18 @@
 
 package org.apache.ignite.internal.processors.hadoop;
 
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.jetbrains.annotations.Nullable;
+
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
-import org.apache.ignite.internal.util.typedef.F;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -33,6 +37,41 @@ import static org.junit.Assert.assertTrue;
  * Utility class for tests.
  */
 public class HadoopTestUtils {
+    /** Base test directory. */
+    private static final File BASE_TEST_DIR = new File(U.getIgniteHome() + "/work/test/hadoop/");
+
+    /**
+     * @return Base directory for tests.
+     */
+    public static File baseTestDir() {
+        return BASE_TEST_DIR;
+    }
+
+    /**
+     * Get test directory.
+     *
+     * @param parts Parts.
+     * @return Directory.
+     */
+    public static File testDir(String... parts) {
+        File res = BASE_TEST_DIR;
+
+        if (parts != null) {
+            for (String part : parts)
+                res = new File(res, part);
+        }
+
+        return res;
+    }
+
+    /**
+     * Clear base test directory.
+     */
+    public static void clearBaseTestDir() {
+        if (baseTestDir().exists())
+            assert delete(baseTestDir());
+    }
+
     /**
      * Checks that job statistics file contains valid strings only.
      *
@@ -40,6 +79,7 @@ public class HadoopTestUtils {
      * @return Amount of events.
      * @throws IOException If failed.
      */
+    @SuppressWarnings("ResultOfMethodCallIgnored")
     public static long simpleCheckJobStatFile(BufferedReader reader) throws IOException {
         Collection<String> phases = new HashSet<>();
 
@@ -104,4 +144,35 @@ public class HadoopTestUtils {
 
         return evtCnt;
     }
+
+    /**
+     * Deletes file or directory with all sub-directories and files.
+     *
+     * @param file File or directory to delete.
+     * @return {@code true} if and only if the file or directory is successfully deleted,
+     *      {@code false} otherwise.
+     */
+    public static boolean delete(@Nullable File file) {
+        if (file == null)
+            return false;
+
+        boolean res = true;
+
+        if (file.isDirectory()) {
+            File[] files = file.listFiles();
+
+            if (files != null && files.length > 0)
+                for (File file1 : files)
+                    if (file1.isDirectory())
+                        res &= delete(file1);
+                    else
+                        res &= file1.delete();
+
+            res &= file.delete();
+        }
+        else
+            res = file.delete();
+
+        return res;
+    }
 }
\ No newline at end of file
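
A small hedged sketch (not part of the commit) of how the new test helpers compose;
the class name and directory parts are hypothetical:

package org.apache.ignite.internal.processors.hadoop;

import java.io.File;

/** Hypothetical sketch: build a test directory path, then remove it recursively. */
public class HadoopTestDirExample {
    public static void main(String[] args) {
        // Resolves to <IGNITE_HOME>/work/test/hadoop/dir1/sub.
        File dir = HadoopTestUtils.testDir("dir1", "sub");

        System.out.println(dir.getAbsolutePath());

        // Recursively deletes a file or directory; false if anything could not be removed.
        System.out.println(HadoopTestUtils.delete(dir));
    }
}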

http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java
new file mode 100644
index 0000000..9e3c8f4
--- /dev/null
+++ b/modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopUserLibsSelfTest.java
@@ -0,0 +1,260 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+
+/**
+ * Tests for user libs parsing.
+ */
+public class HadoopUserLibsSelfTest extends GridCommonAbstractTest {
+    /** Directory 1. */
+    private static final File DIR_1 = HadoopTestUtils.testDir("dir1");
+
+    /** File 1 in directory 1. */
+    private static final File FILE_1_1 = new File(DIR_1, "file1.jar");
+
+    /** File 2 in directory 1. */
+    private static final File FILE_1_2 = new File(DIR_1, "file2.jar");
+
+    /** Directory 2. */
+    private static final File DIR_2 = HadoopTestUtils.testDir("dir2");
+
+    /** File 1 in directory 2. */
+    private static final File FILE_2_1 = new File(DIR_2, "file1.jar");
+
+    /** File 2 in directory 2. */
+    private static final File FILE_2_2 = new File(DIR_2, "file2.jar");
+
+    /** Missing directory. */
+    private static final File MISSING_DIR = HadoopTestUtils.testDir("missing_dir");
+
+    /** Missing file. */
+    private static final File MISSING_FILE = new File(MISSING_DIR, "file.jar");
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTestsStarted() throws Exception {
+        HadoopTestUtils.clearBaseTestDir();
+
+        assert DIR_1.mkdirs();
+        assert DIR_2.mkdirs();
+
+        assert FILE_1_1.createNewFile();
+        assert FILE_1_2.createNewFile();
+        assert FILE_2_1.createNewFile();
+        assert FILE_2_2.createNewFile();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        // Sanity checks before test start.
+        ensureExists(FILE_1_1);
+        ensureExists(FILE_1_2);
+        ensureExists(FILE_2_1);
+        ensureExists(FILE_2_2);
+
+        ensureNotExists(MISSING_DIR);
+        ensureNotExists(MISSING_FILE);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTestsStopped() throws Exception {
+        HadoopTestUtils.clearBaseTestDir();
+    }
+
+    /**
+     * Test null or empty user libs.
+     *
+     * @throws Exception If failed.
+     */
+    public void testNullOrEmptyUserLibs() throws Exception {
+        assert parse(null).isEmpty();
+        assert parse("").isEmpty();
+    }
+
+    /**
+     * Test single file.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSingle() throws Exception {
+        Collection<File> res = parse(single(FILE_1_1));
+
+        assert res.size() == 1;
+        assert res.contains(FILE_1_1);
+
+        res = parse(single(MISSING_FILE));
+
+        assert res.size() == 0;
+    }
+
+    /**
+     * Test multiple files.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMultiple() throws Exception {
+        Collection<File> res =
+            parse(merge(single(FILE_1_1), single(FILE_1_2), single(FILE_2_1), single(FILE_2_2), single(MISSING_FILE)));
+
+        assert res.size() == 4;
+        assert res.contains(FILE_1_1);
+        assert res.contains(FILE_1_2);
+        assert res.contains(FILE_2_1);
+        assert res.contains(FILE_2_2);
+    }
+
+    /**
+     * Test single wildcard.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSingleWildcard() throws Exception {
+        Collection<File> res = parse(wildcard(DIR_1));
+
+        assert res.size() == 2;
+        assert res.contains(FILE_1_1);
+        assert res.contains(FILE_1_2);
+
+        res = parse(wildcard(MISSING_DIR));
+
+        assert res.size() == 0;
+    }
+
+    /**
+     * Test multiple wildcards.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMultipleWildcards() throws Exception {
+        Collection<File> res = parse(merge(wildcard(DIR_1), wildcard(DIR_2), wildcard(MISSING_DIR)));
+
+        assert res.size() == 4;
+        assert res.contains(FILE_1_1);
+        assert res.contains(FILE_1_2);
+        assert res.contains(FILE_2_1);
+        assert res.contains(FILE_2_2);
+    }
+
+    /**
+     * Test mixed tokens.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMixed() throws Exception {
+        String str = merge(
+            single(FILE_1_1),
+            wildcard(DIR_2),
+            single(MISSING_FILE),
+            wildcard(MISSING_DIR)
+        );
+
+        Collection<File> res = parse(str);
+
+        assert res.size() == 3;
+        assert res.contains(FILE_1_1);
+        assert res.contains(FILE_2_1);
+        assert res.contains(FILE_2_2);
+    }
+
+    /**
+     * Ensure provided file exists.
+     *
+     * @param file File.
+     */
+    private static void ensureExists(File file) {
+        assert file.exists();
+    }
+
+    /**
+     * Ensure provided file doesn't exist.
+     *
+     * @param file File.
+     */
+    private static void ensureNotExists(File file) {
+        assert !file.exists();
+    }
+
+    /**
+     * Merge string using path separator.
+     *
+     * @param vals Values.
+     * @return Result.
+     */
+    private static String merge(String... vals) {
+        StringBuilder res = new StringBuilder();
+
+        if (vals != null) {
+            boolean first = true;
+
+            for (String val : vals) {
+                if (first)
+                    first = false;
+                else
+                    res.append(File.pathSeparatorChar);
+
+                res.append(val);
+            }
+        }
+
+        return res.toString();
+    }
+
+    /**
+     * Parse string.
+     *
+     * @param str String.
+     * @return Files.
+     * @throws IOException If failed.
+     */
+    Collection<File> parse(String str) throws IOException {
+        Collection<HadoopClasspathUtils.SearchDirectory> dirs = HadoopClasspathUtils.parseUserLibs(str);
+
+        Collection<File> res = new HashSet<>();
+
+        for (HadoopClasspathUtils.SearchDirectory dir : dirs)
+            Collections.addAll(res, dir.files());
+
+        return res;
+    }
+
+    /**
+     * Get absolute path to a single file.
+     *
+     * @param file File.
+     * @return Path.
+     */
+    private static String single(File file) {
+        return file.getAbsolutePath();
+    }
+
+    /**
+     * Create a wildcard.
+     *
+     * @param file File.
+     * @return Wildcard.
+     */
+    private static String wildcard(File file) {
+        return file.getAbsolutePath() + File.separatorChar + "*";
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/0dc9713a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
index 3374547..603fd5b 100644
--- a/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
+++ b/modules/hadoop/src/test/java/org/apache/ignite/testsuites/IgniteHadoopTestSuite.java
@@ -68,6 +68,7 @@ import org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapperSelfTest;
 import org.apache.ignite.internal.processors.hadoop.HadoopTaskExecutionSelfTest;
 import org.apache.ignite.internal.processors.hadoop.HadoopTasksV1Test;
 import org.apache.ignite.internal.processors.hadoop.HadoopTasksV2Test;
+import org.apache.ignite.internal.processors.hadoop.HadoopUserLibsSelfTest;
 import org.apache.ignite.internal.processors.hadoop.HadoopV2JobSelfTest;
 import org.apache.ignite.internal.processors.hadoop.HadoopValidationSelfTest;
 import org.apache.ignite.internal.processors.hadoop.HadoopWeightedMapReducePlannerTest;
@@ -110,6 +111,8 @@ public class IgniteHadoopTestSuite extends TestSuite {
 
         TestSuite suite = new TestSuite("Ignite Hadoop MR Test Suite");
 
+        suite.addTest(new TestSuite(ldr.loadClass(HadoopUserLibsSelfTest.class.getName())));
+
         suite.addTest(new TestSuite(ldr.loadClass(HadoopDefaultMapReducePlannerSelfTest.class.getName())));
         suite.addTest(new TestSuite(ldr.loadClass(HadoopWeightedMapReducePlannerTest.class.getName())));