Posted to commits@ignite.apache.org by vo...@apache.org on 2016/01/04 08:25:45 UTC

[4/9] ignite git commit: IGNITE-2308: Fixing styling.

IGNITE-2308: Fixing styling.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/dda6b27c
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/dda6b27c
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/dda6b27c

Branch: refs/heads/ignite-2308
Commit: dda6b27c86a3e8998bba4a37ff8fd0bf455b4c56
Parents: 67c1e0f
Author: vozerov-gridgain <vo...@gridgain.com>
Authored: Mon Jan 4 09:47:12 2016 +0400
Committer: vozerov-gridgain <vo...@gridgain.com>
Committed: Mon Jan 4 09:47:12 2016 +0400

----------------------------------------------------------------------
 .../hadoop/fs/v1/IgniteHadoopFileSystem.java    |   1 -
 .../processors/hadoop/HadoopClassLoader.java    | 468 +++++++++----------
 2 files changed, 215 insertions(+), 254 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/dda6b27c/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java
index a72452b..5dce67f 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/hadoop/fs/v1/IgniteHadoopFileSystem.java
@@ -328,7 +328,6 @@ public class IgniteHadoopFileSystem extends FileSystem {
 
             if (initSecondary) {
                 Map<String, String> props = paths.properties();
-                //Object payload0 = paths.getPayload(getClass().getClassLoader());
 
                 String secUri = props.get(SECONDARY_FS_URI);
                 String secConfPath = props.get(SECONDARY_FS_CONFIG_PATH);

http://git-wip-us.apache.org/repos/asf/ignite/blob/dda6b27c/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 89a55e4..a2c9df4 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -128,14 +128,14 @@ public class HadoopClassLoader extends URLClassLoader {
      * @return {@code true} if we need to check this class.
      */
     private static boolean isHadoopIgfs(String cls) {
-        String ignitePackPrefix = "org.apache.ignite";
+        String ignitePkgPrefix = "org.apache.ignite";
 
-        int len = ignitePackPrefix.length();
+        int len = ignitePkgPrefix.length();
 
-        return cls.startsWith(ignitePackPrefix)
-            && (cls.indexOf("igfs.", len) != -1
-                || cls.indexOf(".fs.", len) != -1
-                || cls.indexOf("hadoop.", len) != -1);
+        return cls.startsWith(ignitePkgPrefix) && (
+            cls.indexOf("igfs.", len) != -1 ||
+            cls.indexOf(".fs.", len) != -1 ||
+            cls.indexOf("hadoop.", len) != -1);
     }
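
For a concrete sense of which class names the reformatted check above accepts, here is a
minimal, self-contained sketch of the same predicate with a few illustrative inputs (the
sample class names and the sketch class itself are examples, not part of the commit; the
real method is private to HadoopClassLoader):

    public class IsHadoopIgfsSketch {
        static boolean isHadoopIgfs(String cls) {
            String ignitePkgPrefix = "org.apache.ignite";

            int len = ignitePkgPrefix.length();

            return cls.startsWith(ignitePkgPrefix) && (
                cls.indexOf("igfs.", len) != -1 ||
                cls.indexOf(".fs.", len) != -1 ||
                cls.indexOf("hadoop.", len) != -1);
        }

        public static void main(String[] args) {
            // Matches: Ignite classes with an IGFS-, FS- or Hadoop-related segment after the package prefix.
            System.out.println(isHadoopIgfs("org.apache.ignite.igfs.IgfsPath"));                       // true
            System.out.println(isHadoopIgfs("org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem")); // true

            // Does not match: plain Ignite classes, or Hadoop classes outside the Ignite package.
            System.out.println(isHadoopIgfs("org.apache.ignite.cache.CacheMode"));                     // false
            System.out.println(isHadoopIgfs("org.apache.hadoop.fs.FileSystem"));                       // false
        }
    }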
 
     /**
@@ -276,7 +276,7 @@ public class HadoopClassLoader extends URLClassLoader {
      * @param clsName Class name.
      * @return {@code true} If the class has external dependencies.
      */
-    boolean hasExternalDependencies(final String clsName, final Set<String> visited) {
+    boolean hasExternalDependencies(String clsName, Set<String> visited) {
         if (isHadoop(clsName)) // Hadoop must not be in classpath but Idea sucks, so filtering explicitly as external.
             return true;
 
@@ -304,13 +304,10 @@ public class HadoopClassLoader extends URLClassLoader {
 
         Collector c = new Collector(hasDeps, visited);
 
-        AnnotationVisitor annotationVisitor = new CollectingAnnotationVisitor(c);
-
-        MethodVisitor methVisitor = new CollectingMethodVisitor(c, annotationVisitor);
-
-        FieldVisitor fieldVisitor = new CollectingFieldVisitor(c, annotationVisitor);
-
-        ClassVisitor clsVisitor = new CollectingClassVisitor(c, annotationVisitor, methVisitor, fieldVisitor);
+        AnnotationVisitor annVisitor = new CollectingAnnotationVisitor(c);
+        MethodVisitor mthdVisitor = new CollectingMethodVisitor(c, annVisitor);
+        FieldVisitor fldVisitor = new CollectingFieldVisitor(c, annVisitor);
+        ClassVisitor clsVisitor = new CollectingClassVisitor(c, annVisitor, mthdVisitor, fldVisitor);
 
         rdr.accept(clsVisitor, 0);
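
The hunk above wires one Collector through four ASM visitors and then drives them with
rdr.accept(). For background, this is the standard ASM collecting-visitor pattern; below is
a minimal, self-contained sketch of that pattern, assuming the same org.objectweb.asm
library used by HadoopClassLoader is on the classpath (the sketch class, the target class
and the printed output are illustrative only, not the actual implementation in this diff):

    import java.io.IOException;
    import java.util.Set;
    import java.util.TreeSet;

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassVisitor;
    import org.objectweb.asm.FieldVisitor;
    import org.objectweb.asm.MethodVisitor;
    import org.objectweb.asm.Opcodes;
    import org.objectweb.asm.Type;

    /** Illustrative sketch of the collecting-visitor pattern; not the HadoopClassLoader implementation. */
    public class AsmScanSketch {
        public static void main(String[] args) throws IOException {
            final Set<String> referenced = new TreeSet<>();

            // ASM locates the .class resource on the classpath; any target class works here.
            ClassReader rdr = new ClassReader("java.util.ArrayList");

            rdr.accept(new ClassVisitor(Opcodes.ASM4) {
                @Override public void visit(int ver, int access, String name, String sig, String superName,
                    String[] ifaces) {
                    if (superName != null)
                        referenced.add(superName.replace('/', '.'));
                }

                @Override public FieldVisitor visitField(int access, String name, String desc, String sig, Object val) {
                    referenced.add(Type.getType(desc).getClassName());

                    // Returning null prunes the rest of this field (its annotations) - the same
                    // early-exit idea as the 'c.isDone() ? null : ...' checks later in this diff.
                    return null;
                }

                @Override public MethodVisitor visitMethod(int access, String name, String desc, String sig,
                    String[] exceptions) {
                    for (Type t : Type.getArgumentTypes(desc))
                        referenced.add(t.getClassName());

                    return null; // Skip method bodies and annotations.
                }
            }, ClassReader.SKIP_CODE);

            System.out.println(referenced);
        }
    }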
 
@@ -337,27 +334,186 @@ public class HadoopClassLoader extends URLClassLoader {
     }
 
     /**
+     * @param name Class name.
+     * @return {@code true} If this is a valid class name.
+     */
+    private static boolean validateClassName(String name) {
+        int len = name.length();
+
+        if (len <= 1)
+            return false;
+
+        if (!Character.isJavaIdentifierStart(name.charAt(0)))
+            return false;
+
+        boolean hasDot = false;
+
+        for (int i = 1; i < len; i++) {
+            char c = name.charAt(i);
+
+            if (c == '.')
+                hasDot = true;
+            else if (!Character.isJavaIdentifierPart(c))
+                return false;
+        }
+
+        return hasDot;
+    }
+
+    /**
+     * @param name Variable name.
+     * @param dflt Default.
+     * @return Value.
+     */
+    private static String getEnv(String name, String dflt) {
+        String res = System.getProperty(name);
+
+        if (F.isEmpty(res))
+            res = System.getenv(name);
+
+        return F.isEmpty(res) ? dflt : res;
+    }
+
+    /**
+     * @param res Result.
+     * @param dir Directory.
+     * @param startsWith Starts with prefix.
+     * @throws MalformedURLException If failed.
+     */
+    private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws Exception {
+        File[] files = dir.listFiles(new FilenameFilter() {
+            @Override public boolean accept(File dir, String name) {
+                return startsWith == null || name.startsWith(startsWith);
+            }
+        });
+
+        if (files == null)
+            throw new IOException("Path is not a directory: " + dir);
+
+        for (File file : files)
+            res.add(file.toURI().toURL());
+    }
+
+    /**
+     * @param urls URLs.
+     * @return URLs.
+     */
+    private static URL[] addHadoopUrls(URL[] urls) {
+        Collection<URL> hadoopJars;
+
+        try {
+            hadoopJars = hadoopUrls();
+        }
+        catch (IgniteCheckedException e) {
+            throw new RuntimeException(e);
+        }
+
+        ArrayList<URL> list = new ArrayList<>(hadoopJars.size() + appJars.size() + (urls == null ? 0 : urls.length));
+
+        list.addAll(appJars);
+        list.addAll(hadoopJars);
+
+        if (!F.isEmpty(urls))
+            list.addAll(F.asList(urls));
+
+        return list.toArray(new URL[list.size()]);
+    }
+
+    /**
+     * @return HADOOP_HOME Variable.
+     */
+    @Nullable public static String hadoopHome() {
+        return getEnv("HADOOP_PREFIX", getEnv("HADOOP_HOME", null));
+    }
+
+    /**
+     * @return Collection of jar URLs.
+     * @throws IgniteCheckedException If failed.
+     */
+    public static Collection<URL> hadoopUrls() throws IgniteCheckedException {
+        Collection<URL> hadoopUrls = hadoopJars;
+
+        if (hadoopUrls != null)
+            return hadoopUrls;
+
+        synchronized (HadoopClassLoader.class) {
+            hadoopUrls = hadoopJars;
+
+            if (hadoopUrls != null)
+                return hadoopUrls;
+
+            hadoopUrls = new ArrayList<>();
+
+            String hadoopPrefix = hadoopHome();
+
+            if (F.isEmpty(hadoopPrefix))
+                throw new IgniteCheckedException("Failed to resolve Hadoop installation location. Either " +
+                    "HADOOP_PREFIX or HADOOP_HOME environment variable must be set.");
+
+            String commonHome = getEnv("HADOOP_COMMON_HOME", hadoopPrefix + "/share/hadoop/common");
+            String hdfsHome = getEnv("HADOOP_HDFS_HOME", hadoopPrefix + "/share/hadoop/hdfs");
+            String mapredHome = getEnv("HADOOP_MAPRED_HOME", hadoopPrefix + "/share/hadoop/mapreduce");
+
+            try {
+                addUrls(hadoopUrls, new File(commonHome + "/lib"), null);
+                addUrls(hadoopUrls, new File(hdfsHome + "/lib"), null);
+                addUrls(hadoopUrls, new File(mapredHome + "/lib"), null);
+
+                addUrls(hadoopUrls, new File(hdfsHome), "hadoop-hdfs-");
+
+                addUrls(hadoopUrls, new File(commonHome), "hadoop-common-");
+                addUrls(hadoopUrls, new File(commonHome), "hadoop-auth-");
+                addUrls(hadoopUrls, new File(commonHome + "/lib"), "hadoop-auth-");
+
+                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-common");
+                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-core");
+            }
+            catch (Exception e) {
+                throw new IgniteCheckedException(e);
+            }
+
+            hadoopJars = hadoopUrls;
+
+            return hadoopUrls;
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(HadoopClassLoader.class, this);
+    }
+
+    /**
+     * Getter for name field.
+     */
+    public String name() {
+        return name;
+    }
+
+    /**
      * Implement business logic of the dependency analysis.
      * Keeps the necessary state.
      */
     private class Collector {
         /** Attribute gets 'true' if the positive answer is found. */
-        final AtomicBoolean hasDeps;
+        private final AtomicBoolean hasDeps;
 
         /** Collection of visited class names to prevent infinite loops in case of
          * circular dependencies. */
-        final Set<String> visited;
+        private final Set<String> visited;
 
         /**
          * Constructor.
          *
          * @param hasDeps has dependencies initial value.
-         * @param visitedSet visited set initial value.
+         * @param visited visited set initial value.
          */
-        Collector(AtomicBoolean hasDeps, Set<String> visitedSet) {
-            this.hasDeps = hasDeps;
+        Collector(AtomicBoolean hasDeps, Set<String> visited) {
+            assert hasDeps != null;
+            assert visited != null;
 
-            this.visited = visitedSet;
+            this.hasDeps = hasDeps;
+            this.visited = visited;
         }
 
         /**
@@ -407,8 +563,7 @@ public class HadoopClassLoader extends URLClassLoader {
 
             Boolean res = cache.get(depCls);
 
-            if (res == Boolean.TRUE
-                || (res == null && hasExternalDependencies(depCls, visited)))
+            if (res == Boolean.TRUE || (res == null && hasExternalDependencies(depCls, visited)))
                 hasDeps.set(true);
         }
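
The helpers relocated in the large hunk above resolve the Hadoop installation in a fixed
order: getEnv() consults a -D system property first and the environment variable second,
hadoopHome() checks HADOOP_PREFIX before HADOOP_HOME, and hadoopUrls() derives the
common/hdfs/mapreduce jar directories from that root, caching the URL list with
double-checked locking on HadoopClassLoader.class. Below is a standalone sketch of just the
resolution order, using plain null/empty checks in place of Ignite's F.isEmpty() (the sketch
class name is illustrative):

    /** Illustrative standalone sketch of the getEnv()/hadoopHome() resolution order shown above. */
    public class HadoopEnvSketch {
        /** A -D system property wins over the environment variable, which wins over the default. */
        static String getEnv(String name, String dflt) {
            String res = System.getProperty(name);

            if (res == null || res.isEmpty())
                res = System.getenv(name);

            return res == null || res.isEmpty() ? dflt : res;
        }

        public static void main(String[] args) {
            // Same lookup order as hadoopHome(): HADOOP_PREFIX first, then HADOOP_HOME.
            String home = getEnv("HADOOP_PREFIX", getEnv("HADOOP_HOME", null));

            if (home == null) {
                System.out.println("Neither HADOOP_PREFIX nor HADOOP_HOME is set.");

                return;
            }

            // Default jar locations derived from the installation root, as in hadoopUrls() above.
            System.out.println("common:    " + getEnv("HADOOP_COMMON_HOME", home + "/share/hadoop/common"));
            System.out.println("hdfs:      " + getEnv("HADOOP_HDFS_HOME", home + "/share/hadoop/hdfs"));
            System.out.println("mapreduce: " + getEnv("HADOOP_MAPRED_HOME", home + "/share/hadoop/mapreduce"));
        }
    }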
 
@@ -479,7 +634,7 @@ public class HadoopClassLoader extends URLClassLoader {
     /**
      * Annotation visitor.
      */
-    static class CollectingAnnotationVisitor extends AnnotationVisitor {
+    private static class CollectingAnnotationVisitor extends AnnotationVisitor {
         /** */
         final Collector c;
 
@@ -513,29 +668,32 @@ public class HadoopClassLoader extends URLClassLoader {
 
         /** {@inheritDoc} */
         @Override public AnnotationVisitor visitArray(String name) {
-            return this;
+            return c.isDone() ? null : this;
         }
 
         /** {@inheritDoc} */
         @Override public void visit(String name, Object val) {
+            if (c.isDone())
+                return;
+
             if (val instanceof Type)
                 c.onType((Type)val);
         }
 
         /** {@inheritDoc} */
         @Override public void visitEnd() {
-            // noop
+            // No-op.
         }
     }
 
     /**
      * Field visitor.
      */
-    static class CollectingFieldVisitor extends FieldVisitor {
-        /** */
+    private static class CollectingFieldVisitor extends FieldVisitor {
+        /** Collector. */
         private final Collector c;
 
-        /** */
+        /** Annotation visitor. */
         private final AnnotationVisitor av;
 
         /**
@@ -545,7 +703,6 @@ public class HadoopClassLoader extends URLClassLoader {
             super(Opcodes.ASM4);
 
             this.c = c;
-
             this.av = av;
         }
 
@@ -556,38 +713,43 @@ public class HadoopClassLoader extends URLClassLoader {
 
             c.onType(desc);
 
-            return av;
+            return c.isDone() ? null : av;
         }
 
         /** {@inheritDoc} */
         @Override public void visitAttribute(Attribute attr) {
-            // noop
+            // No-op.
         }
 
         /** {@inheritDoc} */
         @Override public void visitEnd() {
-            // noop
+            // No-op.
         }
     }
 
     /**
      * Class visitor.
      */
-    static class CollectingClassVisitor extends ClassVisitor {
-        /** */
+    private static class CollectingClassVisitor extends ClassVisitor {
+        /** Collector. */
         private final Collector c;
 
-        /** */
+        /** Annotation visitor. */
         private final AnnotationVisitor av;
 
-        /** */
+        /** Method visitor. */
         private final MethodVisitor mv;
 
-        /** */
+        /** Field visitor. */
         private final FieldVisitor fv;
 
         /**
          * Constructor.
+         *
+         * @param c Collector.
+         * @param av Annotation visitor.
+         * @param mv Method visitor.
+         * @param fv Field visitor.
          */
         CollectingClassVisitor(Collector c, AnnotationVisitor av, MethodVisitor mv, FieldVisitor fv) {
             super(Opcodes.ASM4);
@@ -599,8 +761,7 @@ public class HadoopClassLoader extends URLClassLoader {
         }
 
         /** {@inheritDoc} */
-        @Override public void visit(int i, int i2, String name, String signature, String superName,
-            String[] ifaces) {
+        @Override public void visit(int i, int i2, String name, String signature, String superName, String[] ifaces) {
             if (c.isDone())
                 return;
 
@@ -626,7 +787,7 @@ public class HadoopClassLoader extends URLClassLoader {
 
             c.onType(desc);
 
-            return av;
+            return c.isDone() ? null : av;
         }
 
         /** {@inheritDoc} */
@@ -644,7 +805,7 @@ public class HadoopClassLoader extends URLClassLoader {
 
             c.onType(desc);
 
-            return fv;
+            return c.isDone() ? null : fv;
         }
 
         /** {@inheritDoc} */
@@ -661,27 +822,27 @@ public class HadoopClassLoader extends URLClassLoader {
                     c.onInternalTypeName(e);
             }
 
-            return mv;
+            return c.isDone() ? null : mv;
         }
     }
 
     /**
      * Method visitor.
      */
-    static class CollectingMethodVisitor extends MethodVisitor {
-        /** */
+    private static class CollectingMethodVisitor extends MethodVisitor {
+        /** Collector. */
         private final Collector c;
 
-        /** */
+        /** Annotation visitor. */
         private final AnnotationVisitor av;
 
         /**
          * Constructor.
          *
-         * @param c The collector.
-         * @param av The annotation visitor.
+         * @param c Collector.
+         * @param av Annotation visitor.
          */
-        CollectingMethodVisitor(Collector c, AnnotationVisitor av) {
+        private CollectingMethodVisitor(Collector c, AnnotationVisitor av) {
             super(Opcodes.ASM4);
 
             this.c = c;
@@ -695,7 +856,7 @@ public class HadoopClassLoader extends URLClassLoader {
 
             c.onType(desc);
 
-            return av;
+            return c.isDone() ? null : av;
         }
 
         /** {@inheritDoc} */
@@ -705,12 +866,12 @@ public class HadoopClassLoader extends URLClassLoader {
 
             c.onType(desc);
 
-            return av;
+            return c.isDone() ? null : av;
         }
 
         /** {@inheritDoc} */
         @Override public AnnotationVisitor visitAnnotationDefault() {
-            return av;
+            return c.isDone() ? null : av;
         }
 
         /** {@inheritDoc} */
@@ -728,54 +889,12 @@ public class HadoopClassLoader extends URLClassLoader {
 
         /** {@inheritDoc} */
         @Override public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
-            if (c.isDone())
-                return;
-
-            c.onMethodsDesc(desc);
-
-            if (c.isDone())
-                return;
-
-            if (bsmArgs != null) {
-                for (Object b: bsmArgs) {
-                    if (b instanceof Type)
-                        c.onType((Type) b);
-
-                    if (c.isDone())
-                        return;
-                }
-            }
+            // No-op.
         }
 
         /** {@inheritDoc} */
-        @Override public void visitFrame(int type, int nLoc, Object[] locTypes, int nStack,
-        Object[] stackTypes) {
-            // TODO: not sure we need that to detect dependencies. This seems to be superfluous.
-            // Optimizations:
-            // 1) Do not consider frames that are exactly equal to the previous;
-            // 2) Return if local and stack arrays are empty ('nLock' and 'nStack' correspond to the number
-            // of non-null array elements).
-            if (type == Opcodes.F_SAME || (nLoc == 0 && nStack == 0))
-                return;
-
-            if (c.isDone())
-                return;
-
-            for (Object o : locTypes) {
-                if (o instanceof String)
-                    c.onInternalTypeName((String) o);
-
-                if (c.isDone())
-                    return;
-            }
-
-            for (Object o : stackTypes) {
-                if (o instanceof String)
-                    c.onInternalTypeName((String) o);
-
-                if (c.isDone())
-                    return;
-            }
+        @Override public void visitFrame(int type, int nLoc, Object[] locTypes, int nStack, Object[] stackTypes) {
+            // No-op.
         }
 
         /** {@inheritDoc} */
@@ -824,161 +943,4 @@ public class HadoopClassLoader extends URLClassLoader {
             c.onInternalTypeName(type);
         }
     }
-
-    /**
-     * @param name Class name.
-     * @return {@code true} If this is a valid class name.
-     */
-    private static boolean validateClassName(String name) {
-        int len = name.length();
-
-        if (len <= 1)
-            return false;
-
-        if (!Character.isJavaIdentifierStart(name.charAt(0)))
-            return false;
-
-        boolean hasDot = false;
-
-        for (int i = 1; i < len; i++) {
-            char c = name.charAt(i);
-
-            if (c == '.')
-                hasDot = true;
-            else if (!Character.isJavaIdentifierPart(c))
-                return false;
-        }
-
-        return hasDot;
-    }
-
-    /**
-     * @param name Variable name.
-     * @param dflt Default.
-     * @return Value.
-     */
-    private static String getEnv(String name, String dflt) {
-        String res = System.getProperty(name);
-
-        if (F.isEmpty(res))
-            res = System.getenv(name);
-
-        return F.isEmpty(res) ? dflt : res;
-    }
-
-    /**
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws MalformedURLException If failed.
-     */
-    private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws Exception {
-        File[] files = dir.listFiles(new FilenameFilter() {
-            @Override public boolean accept(File dir, String name) {
-                return startsWith == null || name.startsWith(startsWith);
-            }
-        });
-
-        if (files == null)
-            throw new IOException("Path is not a directory: " + dir);
-
-        for (File file : files)
-            res.add(file.toURI().toURL());
-    }
-
-    /**
-     * @param urls URLs.
-     * @return URLs.
-     */
-    private static URL[] addHadoopUrls(URL[] urls) {
-        Collection<URL> hadoopJars;
-
-        try {
-            hadoopJars = hadoopUrls();
-        }
-        catch (IgniteCheckedException e) {
-            throw new RuntimeException(e);
-        }
-
-        ArrayList<URL> list = new ArrayList<>(hadoopJars.size() + appJars.size() + (urls == null ? 0 : urls.length));
-
-        list.addAll(appJars);
-        list.addAll(hadoopJars);
-
-        if (!F.isEmpty(urls))
-            list.addAll(F.asList(urls));
-
-        return list.toArray(new URL[list.size()]);
-    }
-
-    /**
-     * @return HADOOP_HOME Variable.
-     */
-    @Nullable public static String hadoopHome() {
-        return getEnv("HADOOP_PREFIX", getEnv("HADOOP_HOME", null));
-    }
-
-    /**
-     * @return Collection of jar URLs.
-     * @throws IgniteCheckedException If failed.
-     */
-    public static Collection<URL> hadoopUrls() throws IgniteCheckedException {
-        Collection<URL> hadoopUrls = hadoopJars;
-
-        if (hadoopUrls != null)
-            return hadoopUrls;
-
-        synchronized (HadoopClassLoader.class) {
-            hadoopUrls = hadoopJars;
-
-            if (hadoopUrls != null)
-                return hadoopUrls;
-
-            hadoopUrls = new ArrayList<>();
-
-            String hadoopPrefix = hadoopHome();
-
-            if (F.isEmpty(hadoopPrefix))
-                throw new IgniteCheckedException("Failed resolve Hadoop installation location. Either HADOOP_PREFIX or " +
-                    "HADOOP_HOME environment variables must be set.");
-
-            String commonHome = getEnv("HADOOP_COMMON_HOME", hadoopPrefix + "/share/hadoop/common");
-            String hdfsHome = getEnv("HADOOP_HDFS_HOME", hadoopPrefix + "/share/hadoop/hdfs");
-            String mapredHome = getEnv("HADOOP_MAPRED_HOME", hadoopPrefix + "/share/hadoop/mapreduce");
-
-            try {
-                addUrls(hadoopUrls, new File(commonHome + "/lib"), null);
-                addUrls(hadoopUrls, new File(hdfsHome + "/lib"), null);
-                addUrls(hadoopUrls, new File(mapredHome + "/lib"), null);
-
-                addUrls(hadoopUrls, new File(hdfsHome), "hadoop-hdfs-");
-
-                addUrls(hadoopUrls, new File(commonHome), "hadoop-common-");
-                addUrls(hadoopUrls, new File(commonHome), "hadoop-auth-");
-                addUrls(hadoopUrls, new File(commonHome + "/lib"), "hadoop-auth-");
-
-                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-common");
-                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-core");
-            }
-            catch (Exception e) {
-                throw new IgniteCheckedException(e);
-            }
-
-            hadoopJars = hadoopUrls;
-
-            return hadoopUrls;
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override public String toString() {
-        return S.toString(HadoopClassLoader.class, this);
-    }
-
-    /**
-     * Getter for name field.
-     */
-    public String name() {
-        return name;
-    }
 }
\ No newline at end of file