Posted to commits@pig.apache.org by an...@apache.org on 2014/03/19 20:06:38 UTC

svn commit: r1579354 - /pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java

Author: aniket486
Date: Wed Mar 19 19:06:38 2014
New Revision: 1579354

URL: http://svn.apache.org/r1579354
Log:
PIG-3815: Hadoop bug causes Pig to fail silently with jar cache - minor refactor

Modified:
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
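For context, a minimal standalone sketch of the lookup pattern this commit switches to: instead of iterating listStatus() over the checksum directory and comparing filenames, the cached jar is addressed directly as <stagingDir>/<sha1(url)>/<filename> and probed with fs.exists(). The class and method names below are illustrative only, not the actual JobControlCompiler code; the real change is in the diff that follows.

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.URL;
    import org.apache.commons.codec.digest.DigestUtils;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class JarCacheLookupSketch {
        /**
         * Returns the cached jar path if <stagingDir>/<sha1(url)>/<filename> exists,
         * refreshing its access time so the entry looks recently used to any cleanup.
         * Returns null on a cache miss (the caller would then copy the jar into the cache).
         */
        public static Path findInJarCache(Configuration conf, Path stagingDir,
                                          URL url, String filename) throws IOException {
            InputStream is = url.openStream();
            String checksum;
            try {
                checksum = DigestUtils.shaHex(is);
            } finally {
                is.close();
            }
            FileSystem fs = FileSystem.get(conf);
            Path cacheFile = new Path(new Path(stagingDir, checksum), filename);
            if (fs.exists(cacheFile)) {
                // Mirror the committed code: -1 leaves the modification time unchanged,
                // only the access time is set to "now".
                fs.setTimes(cacheFile, -1, System.currentTimeMillis());
                return cacheFile;
            }
            return null;
        }
    }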

Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java?rev=1579354&r1=1579353&r2=1579354&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java Wed Mar 19 19:06:38 2014
@@ -1632,25 +1632,16 @@ public class JobControlCompiler{
             String checksum = DigestUtils.shaHex(url.openStream());
             FileSystem fs = FileSystem.get(conf);
             Path cacheDir = new Path(stagingDir, checksum);
-            FileStatus [] statuses = fs.listStatus(cacheDir);
-            if (statuses != null) {
-                for (FileStatus stat : statuses) {
-                    Path jarPath = stat.getPath();
-                    if(jarPath.getName().equals(filename)) {
-                        log.info("Found " + url + " in jar cache at "+ stagingDir);
-                        long curTime = System.currentTimeMillis();
-                        fs.setTimes(jarPath, -1, curTime);
-                        // PIG-3815 In hadoop 0.20, addFileToClassPath uses : as separator
-                        // jarPath has full uri at this point, we need to remove hdfs://nn:port
-                        // part to avoid parsing errors on backend
-                        return new Path(jarPath.toUri().getPath());
-                    }
-                }
+            Path cacheFile = new Path(cacheDir, filename);
+            if (fs.exists(cacheFile)) {
+               log.info("Found " + url + " in jar cache at "+ stagingDir);
+               long curTime = System.currentTimeMillis();
+               fs.setTimes(cacheFile, -1, curTime);
+               return cacheFile;
             }
             log.info("Url "+ url + " was not found in jarcache at "+ stagingDir);
             // attempt to copy to cache else return null
             fs.mkdirs(cacheDir, FileLocalizer.OWNER_ONLY_PERMS);
-            Path cacheFile = new Path(cacheDir, filename);
             OutputStream os = null;
             InputStream is = null;
             try {