You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ga...@apache.org on 2008/03/04 22:20:55 UTC

svn commit: r633652 - in /incubator/pig/trunk: CHANGES.txt src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java

Author: gates
Date: Tue Mar  4 13:20:52 2008
New Revision: 633652

URL: http://svn.apache.org/viewvc?rev=633652&view=rev
Log:
PIG-120:  Support map reduce in local mode.  To do this, the user needs to specify the execution type as mapreduce and the cluster name as local. 

Modified:
    incubator/pig/trunk/CHANGES.txt
    incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java

Modified: incubator/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/CHANGES.txt?rev=633652&r1=633651&r2=633652&view=diff
==============================================================================
--- incubator/pig/trunk/CHANGES.txt (original)
+++ incubator/pig/trunk/CHANGES.txt Tue Mar  4 13:20:52 2008
@@ -149,3 +149,7 @@
     PIG-13: adding version to the system (joa23 via olgan)
 
 	PIG-113:  Make explain output more understandable (pi_song via gates)
+
+	PIG-120:  Support map reduce in local mode.  To do this, the user needs
+	to specify the execution type as mapreduce and the cluster name as local
+	(joa23 via gates).

Modified: incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=633652&r1=633651&r2=633652&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Tue Mar  4 13:20:52 2008
@@ -110,6 +110,8 @@
         setSSHFactory();
         
         String hodServer = System.getProperty("hod.server");
+        String cluster = System.getProperty("cluster");
+        String nameNode = System.getProperty("namenode");
     
         if (hodServer != null && hodServer.length() > 0) {
             String hdfsAndMapred[] = doHod(hodServer);
@@ -117,17 +119,15 @@
             setJobtrackerLocation(hdfsAndMapred[1]);
         }
         else {
-            String cluster = System.getProperty("cluster");
             if (cluster != null && cluster.length() > 0) {
-                if(cluster.indexOf(':') < 0) {
+                if(cluster.indexOf(':') < 0 && !cluster.equalsIgnoreCase("local")) {
                     cluster = cluster + ":50020";
                 }
                 setJobtrackerLocation(cluster);
             }
 
-            String nameNode = System.getProperty("namenode");
             if (nameNode!=null && nameNode.length() > 0) {
-                if(nameNode.indexOf(':') < 0) {
+                if(nameNode.indexOf(':') < 0 && !nameNode.equalsIgnoreCase("local")) {
                     nameNode = nameNode + ":8020";
                 }
                 setFilesystemLocation(nameNode);
@@ -143,16 +143,18 @@
             throw new ExecException("Failed to create DataStorage", e);
         }
             
-        log.info("Connecting to map-reduce job tracker at: " + conf.get("mapred.job.tracker"));
-        
-        try {
-            jobTracker = (JobSubmissionProtocol) RPC.getProxy(JobSubmissionProtocol.class,
-                                                              JobSubmissionProtocol.versionID, 
-                                                              JobTracker.getAddress(conf.getConfiguration()),
-                                                              conf.getConfiguration());
-        }
-        catch (IOException e) {
-            throw new ExecException("Failed to crate job tracker", e);
+        if(cluster != null && !cluster.equalsIgnoreCase("local")){
+	        log.info("Connecting to map-reduce job tracker at: " + conf.get("mapred.job.tracker"));
+	        
+	        try {
+	            jobTracker = (JobSubmissionProtocol) RPC.getProxy(JobSubmissionProtocol.class,
+	                                                              JobSubmissionProtocol.versionID, 
+	                                                              JobTracker.getAddress(conf.getConfiguration()),
+	                                                              conf.getConfiguration());
+	        }
+	        catch (IOException e) {
+	            throw new ExecException("Failed to create job tracker", e);
+	        }
         }
 
         try {