You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tuscany.apache.org by js...@apache.org on 2008/07/15 19:36:42 UTC

svn commit: r676988 - in /tuscany/sandbox/ctrezzo: ./ src/ src/org/ src/org/apache/ src/org/apache/hadoop/ src/org/apache/hadoop/myExamples/ src/services/

Author: jsdelfino
Date: Tue Jul 15 10:36:41 2008
New Revision: 676988

URL: http://svn.apache.org/viewvc?rev=676988&view=rev
Log:
Applied patch from JIRA TUSCANY-2471.

Added:
    tuscany/sandbox/ctrezzo/file01.txt
    tuscany/sandbox/ctrezzo/src/
    tuscany/sandbox/ctrezzo/src/Test.java
    tuscany/sandbox/ctrezzo/src/Test2.java
    tuscany/sandbox/ctrezzo/src/org/
    tuscany/sandbox/ctrezzo/src/org/apache/
    tuscany/sandbox/ctrezzo/src/org/apache/hadoop/
    tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/
    tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/WordCount.java
    tuscany/sandbox/ctrezzo/src/services/
    tuscany/sandbox/ctrezzo/src/services/WordCount.java
    tuscany/sandbox/ctrezzo/src/services/WordCountImpl.java

Added: tuscany/sandbox/ctrezzo/file01.txt
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/file01.txt?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/file01.txt (added)
+++ tuscany/sandbox/ctrezzo/file01.txt Tue Jul 15 10:36:41 2008
@@ -0,0 +1 @@
+Hello World Bye World
\ No newline at end of file

Added: tuscany/sandbox/ctrezzo/src/Test.java
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/src/Test.java?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/src/Test.java (added)
+++ tuscany/sandbox/ctrezzo/src/Test.java Tue Jul 15 10:36:41 2008
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.    
+ */
+
+import org.apache.hadoop.util.RunJar;
+
+// Minimal command-line launcher that delegates entirely to Hadoop's RunJar
+// (equivalent to "hadoop jar ..."). Exists so a Map-Reduce job jar can be run
+// from a plain JVM, provided the Hadoop jars and conf directory are on the classpath.
+public class Test {
+
+	/**
+	 * @param args
+	 * 
+	 * The following should be entered as parameters for the main method:
+	 * Jar File, Name of Main Class, Input File, Output directory
+	 * 
+	 * Example:
+	 * wordcount.jar org.apache.hadoop.examples.WordCount /file01.txt /output1
+	 * 
+	 * The jar file has to be in the same directory as the executable. The input/output paths
+	 * refer to either a local directory, or an HDFS directory, depending on which job tracker
+	 * is used.
+	 * 
+	 * Also, all Hadoop jar files must be on the class path, as well as the Hadoop conf directory.
+	 */
+	public static void main(String[] args) {
+		
+		try {
+			// RunJar unpacks the jar and invokes the named main class with the remaining args.
+			RunJar.main(args);
+		}
+		catch(Throwable e) {
+			// NOTE(review): catching Throwable and printing only e.toString() discards the
+			// stack trace and swallows even Errors; e.printStackTrace() (or rethrow) would
+			// preserve diagnostics. Left as-is in this archived patch.
+			System.out.println(e);
+		}
+		
+	}
+
+}

Added: tuscany/sandbox/ctrezzo/src/Test2.java
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/src/Test2.java?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/src/Test2.java (added)
+++ tuscany/sandbox/ctrezzo/src/Test2.java Tue Jul 15 10:36:41 2008
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.    
+ */
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+
+
+// Demonstrates submitting the WordCount Map-Reduce job programmatically,
+// configuring the JobConf by hand instead of going through RunJar or
+// WordCount's own main/ToolRunner entry point.
+public class Test2 {
+	
+	/*
+	 * This submits a Map-Reduce job without using runJar, or the main method in WordCount.
+	 */
+	public static void main(String[] args) {
+		
+		//this path is a HDFS path
+	    Path inputPath = new Path("/file01.txt");
+	    //this path is a HDFS path
+	    Path outputPath = new Path("/output7");
+	    
+	    // WordCount extends Configured; instantiating it only to carry the Configuration.
+	    org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount();
+		Configuration conf = new Configuration();
+		
+		myCount.setConf(conf);
+		
+		// JobConf is seeded from the Configuration; the class argument tells Hadoop
+		// which jar to ship to the cluster (the one containing WordCount).
+		JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class);
+	    mapredConf.setJobName("wordcount");
+	 
+	    // the keys are words (strings)
+	    mapredConf.setOutputKeyClass(Text.class);
+	    // the values are counts (ints)
+	    mapredConf.setOutputValueClass(IntWritable.class);
+	    
+	    mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class);        
+	    // The reducer is reused as a combiner: summing counts is associative, so
+	    // partial sums on the map side are safe.
+	    mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
+	    mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
+	    
+	    mapredConf.setInputPath(inputPath);
+	    mapredConf.setOutputPath(outputPath);
+	    
+	    try {
+	    	// Blocks until the job completes (or fails).
+	    	JobClient.runJob(mapredConf);
+	    }
+	    catch(Exception e) {
+	    	// NOTE(review): broad catch that prints only e.toString(); the stack trace
+	    	// is lost and the JVM still exits with status 0 on failure.
+	    	System.out.println("ERROR: " + e);
+	    }
+	}
+	
+}
+
+

Added: tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/WordCount.java
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/WordCount.java?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/WordCount.java (added)
+++ tuscany/sandbox/ctrezzo/src/org/apache/hadoop/myExamples/WordCount.java Tue Jul 15 10:36:41 2008
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.myExamples;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * This is an example Hadoop Map/Reduce application.
+ * It reads the text input files, breaks each line into words
+ * and counts them. The output is a locally sorted list of words and the 
+ * count of how often they occurred.
+ *
+ * To run: bin/hadoop jar build/hadoop-examples.jar wordcount
+ *            [-m <i>maps</i>] [-r <i>reduces</i>] <i>in-dir</i> <i>out-dir</i> 
+ */
+public class WordCount extends Configured implements Tool {
+  
+  /**
+   * Counts the words in each line.
+   * For each line of input, break the line into words and emit them as
+   * (<b>word</b>, <b>1</b>).
+   */
+  public static class MapClass extends MapReduceBase
+    implements Mapper<LongWritable, Text, Text, IntWritable> {
+    
+    // Both outputs are reused across map() calls to avoid per-record allocation;
+    // safe because the framework serializes each (key, value) on collect().
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+    
+    public void map(LongWritable key, Text value, 
+                    OutputCollector<Text, IntWritable> output, 
+                    Reporter reporter) throws IOException {
+      String line = value.toString();
+      // Default StringTokenizer delimiters: whitespace only (no punctuation handling).
+      StringTokenizer itr = new StringTokenizer(line);
+      while (itr.hasMoreTokens()) {
+        word.set(itr.nextToken());
+        output.collect(word, one);
+      }
+    }
+  }
+  
+  /**
+   * A reducer class that just emits the sum of the input values.
+   */
+  public static class Reduce extends MapReduceBase
+    implements Reducer<Text, IntWritable, Text, IntWritable> {
+    
+    public void reduce(Text key, Iterator<IntWritable> values,
+                       OutputCollector<Text, IntWritable> output, 
+                       Reporter reporter) throws IOException {
+      int sum = 0;
+      while (values.hasNext()) {
+        sum += values.next().get();
+      }
+      // One (word, total) pair per distinct key.
+      output.collect(key, new IntWritable(sum));
+    }
+  }
+  
+  // Prints usage plus the generic Hadoop options and returns the conventional
+  // non-zero Tool exit status.
+  static int printUsage() {
+    System.out.println("wordcount [-m <maps>] [-r <reduces>] <input> <output>");
+    ToolRunner.printGenericCommandUsage(System.out);
+    return -1;
+  }
+  
+  /**
+   * The main driver for word count map/reduce program.
+   * Invoke this method to submit the map/reduce job.
+   * @throws IOException When there is communication problems with the 
+   *                     job tracker.
+   */
+  public int run(String[] args) throws Exception {
+    // getConf() carries any generic options already parsed by ToolRunner.
+    JobConf conf = new JobConf(getConf(), WordCount.class);
+    conf.setJobName("wordcount");
+ 
+    // the keys are words (strings)
+    conf.setOutputKeyClass(Text.class);
+    // the values are counts (ints)
+    conf.setOutputValueClass(IntWritable.class);
+    
+    conf.setMapperClass(MapClass.class);        
+    // Reduce doubles as the combiner: integer addition is associative and
+    // commutative, so map-side partial sums do not change the result.
+    conf.setCombinerClass(Reduce.class);
+    conf.setReducerClass(Reduce.class);
+    
+    // Hand-rolled arg parsing: -m / -r take an integer; everything else is
+    // treated as a positional input/output path.
+    List<String> other_args = new ArrayList<String>();
+    for(int i=0; i < args.length; ++i) {
+      try {
+        if ("-m".equals(args[i])) {
+          conf.setNumMapTasks(Integer.parseInt(args[++i]));
+        } else if ("-r".equals(args[i])) {
+          conf.setNumReduceTasks(Integer.parseInt(args[++i]));
+        } else {
+          other_args.add(args[i]);
+        }
+      } catch (NumberFormatException except) {
+        // -m/-r followed by a non-integer.
+        System.out.println("ERROR: Integer expected instead of " + args[i]);
+        return printUsage();
+      } catch (ArrayIndexOutOfBoundsException except) {
+        // -m/-r was the last argument, so args[++i] ran off the end.
+        System.out.println("ERROR: Required parameter missing from " +
+                           args[i-1]);
+        return printUsage();
+      }
+    }
+    // Make sure there are exactly 2 parameters left.
+    if (other_args.size() != 2) {
+      System.out.println("ERROR: Wrong number of parameters: " +
+                         other_args.size() + " instead of 2.");
+      return printUsage();
+    }
+    conf.setInputPath(new Path(other_args.get(0)));
+    conf.setOutputPath(new Path(other_args.get(1)));
+        
+    // Blocks until completion; throws on job failure.
+    JobClient.runJob(conf);
+    return 0;
+  }
+  
+  
+  public static void main(String[] args) throws Exception {
+    // ToolRunner strips generic Hadoop options (-conf, -D, ...) before
+    // delegating the remainder to run().
+    int res = ToolRunner.run(new Configuration(), new WordCount(), args);
+    System.exit(res);
+  }
+
+}

Added: tuscany/sandbox/ctrezzo/src/services/WordCount.java
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/src/services/WordCount.java?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/src/services/WordCount.java (added)
+++ tuscany/sandbox/ctrezzo/src/services/WordCount.java Tue Jul 15 10:36:41 2008
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.    
+ */
+
+package services;
+import org.osoa.sca.annotations.Remotable;
+
+// SCA service contract: @Remotable marks this interface as callable across
+// process/machine boundaries, so implementations may be bound to remote protocols.
+@Remotable
+public interface WordCount {
+	
+	/*
+	 * Runs the Map-Reduce job.
+	 */
+	void runJob();
+	
+}

Added: tuscany/sandbox/ctrezzo/src/services/WordCountImpl.java
URL: http://svn.apache.org/viewvc/tuscany/sandbox/ctrezzo/src/services/WordCountImpl.java?rev=676988&view=auto
==============================================================================
--- tuscany/sandbox/ctrezzo/src/services/WordCountImpl.java (added)
+++ tuscany/sandbox/ctrezzo/src/services/WordCountImpl.java Tue Jul 15 10:36:41 2008
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.    
+ */
+
+package services;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.osoa.sca.annotations.Property;
+
+// SCA component implementation of the services.WordCount contract.
+// runJob() is essentially the same job-submission sequence as Test2.main,
+// but with the input/output paths exposed as injectable SCA properties.
+public class WordCountImpl implements services.WordCount {
+	
+	//this path is a HDFS path
+    // NOTE(review): @Property on a Hadoop Path field — presumably the SCA runtime
+    // can convert a configured string into a Path; verify this is supported,
+    // otherwise the field defaults below are always used.
+    @Property
+    Path inputPath = new Path("/file01.txt");
+    
+    //this path is a HDFS path
+    @Property
+    Path outputPath = new Path("/output1");
+	
+	public void runJob() {
+		
+		// WordCount extends Configured; instantiated only to carry the Configuration.
+		org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount();
+		Configuration conf = new Configuration();
+		
+		myCount.setConf(conf);
+		
+		// The class argument identifies the jar Hadoop ships to the cluster.
+		JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class);
+	    mapredConf.setJobName("wordcount");
+	 
+	    // the keys are words (strings)
+	    mapredConf.setOutputKeyClass(Text.class);
+	    // the values are counts (ints)
+	    mapredConf.setOutputValueClass(IntWritable.class);
+	    
+	    mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class);        
+	    // Reducer reused as combiner: summing counts is associative/commutative.
+	    mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
+	    mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
+	    
+	    mapredConf.setInputPath(inputPath);
+	    mapredConf.setOutputPath(outputPath);
+	    
+	    try {
+	    	// Blocks until the job completes (or fails).
+	    	JobClient.runJob(mapredConf);
+	    }
+	    catch(Exception e) {
+	    	// NOTE(review): broad catch prints only e.toString(); callers of this
+	    	// service get no failure signal and the stack trace is lost.
+	    	System.out.println("ERROR: " + e);
+	    }
+	}
+}