Posted to commits@hdt.apache.org by ad...@apache.org on 2013/01/08 23:26:26 UTC

[4/11] Import of source from Apache Hadoop MapReduce contrib, this is the plugin as it existed in the Hadoop 0.23.4 release.

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/ImageLibrary.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/ImageLibrary.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/ImageLibrary.java
new file mode 100644
index 0000000..5fa5d69
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/ImageLibrary.java
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.ui.ISharedImages;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.Bundle;
+
+/**
+ * Icon and image resource manager for the plugin
+ */
+public class ImageLibrary {
+
+  private final Bundle bundle = Activator.getDefault().getBundle();
+
+  /**
+   * Singleton instance
+   */
+  private static volatile ImageLibrary instance = null;
+
+  private ISharedImages sharedImages =
+      PlatformUI.getWorkbench().getSharedImages();
+
+  /**
+   * Directory within the Bundle where resources (icons, images...) reside
+   */
+  private static final String RESOURCE_DIR = "resources/";
+
+  /**
+   * Public access to image descriptors
+   * 
+   * @param name symbolic name of the image
+   * @return the image descriptor
+   */
+  public static ImageDescriptor get(String name) {
+    return getInstance().getImageDescriptorByName(name);
+  }
+
+  /**
+   * Public access to images
+   * 
+   * @param name symbolic name of the image
+   * @return the image
+   */
+  public static Image getImage(String name) {
+    return getInstance().getImageByName(name);
+  }
+
+  /**
+   * Singleton access
+   * 
+   * @return the Image library
+   */
+  public static ImageLibrary getInstance() {
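+    // Double-checked locking: the unsynchronized first check avoids lock
+    // overhead on the common path, while the volatile 'instance' field
+    // ensures readers see a fully constructed object.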
+    if (instance == null) {
+      synchronized (ImageLibrary.class) {
+        if (instance == null)
+          instance = new ImageLibrary();
+      }
+    }
+    return instance;
+  }
+
+  /**
+   * Map of registered resources (ImageDescriptor and Image)
+   */
+  private Map<String, ImageDescriptor> descMap =
+      new HashMap<String, ImageDescriptor>();
+
+  private Map<String, Image> imageMap = new HashMap<String, Image>();
+
+  /**
+   * Image library constructor: put image definitions here.
+   */
+  private ImageLibrary() {
+    /*
+     * Servers view
+     */
+    newImage("server.view.location.entry", "Elephant-24x24.png");
+    newImage("server.view.job.entry", "job.gif");
+    newImage("server.view.action.location.new", "location-new-16x16.png");
+    newImage("server.view.action.location.edit", "location-edit-16x16.png");
+    newSharedImage("server.view.action.delete",
+        ISharedImages.IMG_TOOL_DELETE);
+
+    /*
+     * DFS Browser
+     */
+    newImage("dfs.browser.root.entry", "files.gif");
+    newImage("dfs.browser.location.entry", "Elephant-16x16.png");
+    newSharedImage("dfs.browser.folder.entry", ISharedImages.IMG_OBJ_FOLDER);
+    newSharedImage("dfs.browser.file.entry", ISharedImages.IMG_OBJ_FILE);
+    // DFS files in editor
+    newSharedImage("dfs.file.editor", ISharedImages.IMG_OBJ_FILE);
+    // Actions
+    newImage("dfs.browser.action.mkdir", "new-folder.png");
+    newImage("dfs.browser.action.download", "download.png");
+    newImage("dfs.browser.action.upload_files", "upload.png");
+    newImage("dfs.browser.action.upload_dir", "upload.png");
+    newSharedImage("dfs.browser.action.delete",
+        ISharedImages.IMG_TOOL_DELETE);
+    newImage("dfs.browser.action.refresh", "refresh.png");
+
+    /*
+     * Wizards
+     */
+    newImage("wizard.mapper.new", "mapwiz.png");
+    newImage("wizard.reducer.new", "reducewiz.png");
+    newImage("wizard.driver.new", "driverwiz.png");
+    newImage("wizard.mapreduce.project.new", "projwiz.png");
+  }
+
+  /**
+   * Accessor to registered image descriptors
+   * 
+   * @param name symbolic name of the image
+   * @return the image descriptor, or null if none is registered under name
+   */
+  private ImageDescriptor getImageDescriptorByName(String name) {
+    return this.descMap.get(name);
+  }
+
+  /**
+   * Accessor to registered images
+   * 
+   * @param name symbolic name of the image
+   * @return the image, or null if none is registered under name
+   */
+  private Image getImageByName(String name) {
+    return this.imageMap.get(name);
+  }
+
+  /**
+   * Access to platform shared images
+   * 
+   * @param name name of the shared image ({@link ISharedImages})
+   * @return the shared image descriptor
+   */
+  private ImageDescriptor getSharedByName(String name) {
+    return sharedImages.getImageDescriptor(name);
+  }
+
+  /**
+   * Load and register a new image. If the image resource does not exist or
+   * fails to load, a default "error" resource is supplied.
+   * 
+   * @param name name of the image
+   * @param filename name of the file containing the image
+   * @return whether the image was loaded successfully
+   */
+  private boolean newImage(String name, String filename) {
+    ImageDescriptor id;
+    boolean success;
+
+    try {
+      URL fileURL =
+          FileLocator.find(bundle, new Path(RESOURCE_DIR + filename), null);
+      id = ImageDescriptor.createFromURL(FileLocator.toFileURL(fileURL));
+      success = true;
+
+    } catch (Exception e) {
+
+      e.printStackTrace();
+      id = ImageDescriptor.getMissingImageDescriptor();
+      // id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
+      success = false;
+    }
+
+    descMap.put(name, id);
+    imageMap.put(name, id.createImage(true));
+
+    return success;
+  }
+
+  /**
+   * Register an image from the workspace shared image pool. If the image
+   * resource does not exist or fails to load, a default "error" resource is
+   * supplied.
+   * 
+   * @param name name of the image
+   * @param sharedName name of the shared image ({@link ISharedImages})
+   * @return whether the image was loaded successfully
+   */
+  private boolean newSharedImage(String name, String sharedName) {
+    boolean success = true;
+    ImageDescriptor id = getSharedByName(sharedName);
+
+    if (id == null) {
+      id = ImageDescriptor.getMissingImageDescriptor();
+      // id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
+      success = false;
+    }
+
+    descMap.put(name, id);
+    imageMap.put(name, id.createImage(true));
+
+    return success;
+  }
+
+  /**
+   * Register an image provided by another plug-in. If the image resource
+   * does not exist or fails to load, a default "error" resource is
+   * supplied.
+   * 
+   * @param name name of the image
+   * @param pluginId identifier of the plug-in providing the image
+   * @param filename path of the image file within that plug-in
+   * @return whether the image was loaded successfully
+   */
+  private boolean newPluginImage(String name, String pluginId,
+      String filename) {
+
+    boolean success = true;
+    ImageDescriptor id =
+        AbstractUIPlugin.imageDescriptorFromPlugin(pluginId, filename);
+
+    if (id == null) {
+      id = ImageDescriptor.getMissingImageDescriptor();
+      // id = getSharedByName(ISharedImages.IMG_OBJS_ERROR_TSK);
+      success = false;
+    }
+
+    descMap.put(name, id);
+    imageMap.put(name, id.createImage(true));
+
+    return success;
+  }
+}
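
A minimal usage sketch (not part of the imported source): client code resolves icons registered in the constructor above through the static accessors, e.g. when decorating a wizard page:

    // keys must match the entries registered in the ImageLibrary constructor
    ImageDescriptor wizardIcon = ImageLibrary.get("wizard.driver.new");
    Image refreshIcon = ImageLibrary.getImage("dfs.browser.action.refresh");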

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java
new file mode 100644
index 0000000..fdfdcfb
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.File;
+import java.io.FileFilter;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.eclipse.core.resources.IProject;
+import org.eclipse.core.resources.IProjectNature;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.NullProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.core.runtime.QualifiedName;
+import org.eclipse.jdt.core.IClasspathEntry;
+import org.eclipse.jdt.core.IJavaProject;
+import org.eclipse.jdt.core.JavaCore;
+
+/**
+ * Class to configure and deconfigure an Eclipse project with the MapReduce
+ * project nature.
+ */
+
+public class MapReduceNature implements IProjectNature {
+
+  public static final String ID = "org.apache.hadoop.eclipse.Nature";
+
+  private IProject project;
+
+  static Logger log = Logger.getLogger(MapReduceNature.class.getName());
+
+  /**
+   * Configures an Eclipse project as a Map/Reduce project by adding the
+   * Hadoop libraries to a project's classpath.
+   */
+  public void configure() throws CoreException {
+    String path =
+        project.getPersistentProperty(new QualifiedName(Activator.PLUGIN_ID,
+            "hadoop.runtime.path"));
+
+    File dir = new File(path);
+    final ArrayList<File> coreJars = new ArrayList<File>();
+    dir.listFiles(new FileFilter() {
+      public boolean accept(File pathname) {
+        String fileName = pathname.getName();
+
+        // get the hadoop core jar without touching test or examples
+        // (older versions of Hadoop don't use the word "core") -- eyhung
+        if ((fileName.indexOf("hadoop") != -1) && (fileName.endsWith("jar"))
+            && (fileName.indexOf("test") == -1)
+            && (fileName.indexOf("examples") == -1)) {
+          coreJars.add(pathname);
+        }
+
+        return false; // we don't care what this returns
+      }
+    });
+    File dir2 = new File(path + File.separatorChar + "lib");
+    if (dir2.exists() && dir2.isDirectory()) {
+      dir2.listFiles(new FileFilter() {
+        public boolean accept(File pathname) {
+          if ((!pathname.isDirectory())
+              && (pathname.getName().endsWith("jar"))) {
+            coreJars.add(pathname);
+          }
+
+          return false; // we don't care what this returns
+        }
+      });
+    }
+
+    // Add Hadoop libraries onto classpath
+    IJavaProject javaProject = JavaCore.create(getProject());
+    // Bundle bundle = Activator.getDefault().getBundle();
+    try {
+      IClasspathEntry[] currentCp = javaProject.getRawClasspath();
+      IClasspathEntry[] newCp =
+          new IClasspathEntry[currentCp.length + coreJars.size()];
+      System.arraycopy(currentCp, 0, newCp, 0, currentCp.length);
+
+      final Iterator<File> i = coreJars.iterator();
+      int count = 0;
+      while (i.hasNext()) {
+        // for (int i = 0; i < s_coreJarNames.length; i++) {
+
+        final File f = i.next();
+        // URL url = FileLocator.toFileURL(FileLocator.find(bundle, new
+        // Path("lib/" + s_coreJarNames[i]), null));
+        URL url = f.toURI().toURL();
+        log.finer("hadoop library url.getPath() = " + url.getPath());
+
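+        // fill the new entries from the end of the array; the front still
+        // holds the pre-existing classpath entries copied above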
+        newCp[newCp.length - 1 - count] =
+            JavaCore.newLibraryEntry(new Path(url.getPath()), null, null);
+        count++;
+      }
+
+      javaProject.setRawClasspath(newCp, new NullProgressMonitor());
+    } catch (Exception e) {
+      log.log(Level.SEVERE, "Exception generated in "
+          + this.getClass().getCanonicalName(), e);
+    }
+  }
+
+  /**
+   * Deconfigure a project from MapReduce status. Currently unimplemented.
+   */
+  public void deconfigure() throws CoreException {
+    // TODO Auto-generated method stub
+  }
+
+  /**
+   * Returns the project to which this project nature applies.
+   */
+  public IProject getProject() {
+    return this.project;
+  }
+
+  /**
+   * Sets the project to which this nature applies. Used when instantiating
+   * this project nature at runtime.
+   */
+  public void setProject(IProject project) {
+    this.project = project;
+  }
+
+}
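
For context, attaching this nature to a project follows the standard Eclipse pattern (NewMapReduceProjectWizard.performFinish below does the same); a minimal sketch, assuming an open IProject named project:

    // prepend the MapReduce nature so its configure() runs for the project
    IProjectDescription description = project.getDescription();
    String[] existing = description.getNatureIds();
    String[] natures = new String[existing.length + 1];
    System.arraycopy(existing, 0, natures, 1, existing.length);
    natures[0] = MapReduceNature.ID;
    description.setNatureIds(natures);
    project.setDescription(description, new NullProgressMonitor());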

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java
new file mode 100644
index 0000000..7866c30
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Driver class (a class that runs a MapReduce job).
+ * 
+ */
+
+public class NewDriverWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private NewDriverWizardPage page;
+
+  /*
+   * @Override public boolean performFinish() { }
+   */
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  public NewDriverWizard() {
+    setWindowTitle("New MapReduce Driver");
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new NewDriverWizardPage();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  /**
+   * Performs any actions appropriate in response to the user having pressed
+   * the Finish button, or refuses if finishing now is not permitted.
+   */
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /**
+   * Runs the type creation for this wizard's page.
+   */
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    // guard against a failed type creation (mirrors NewReducerWizard)
+    return (page.getCreatedType() == null) ? null : page.getCreatedType()
+        .getPrimaryElement();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java
new file mode 100644
index 0000000..94b2bc9
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * Pre-fills the new MapReduce driver class with a template.
+ * 
+ */
+
+public class NewDriverWizardPage extends NewTypeWizardPage {
+  private Button isCreateMapMethod;
+
+  private Text reducerText;
+
+  private Text mapperText;
+
+  private final boolean showContainerSelector;
+
+  public NewDriverWizardPage() {
+    this(true);
+  }
+
+  public NewDriverWizardPage(boolean showContainerSelector) {
+    super(true, "MapReduce Driver");
+
+    this.showContainerSelector = showContainerSelector;
+    setTitle("MapReduce Driver");
+    setDescription("Create a new MapReduce driver.");
+    setImageDescriptor(ImageLibrary.get("wizard.driver.new"));
+  }
+
+  public void setSelection(IStructuredSelection selection) {
+    initContainerPage(getInitialJavaElement(selection));
+    initTypePage(getInitialJavaElement(selection));
+  }
+
+  /**
+   * Creates the new type using the entered field values.
+   */
+  @Override
+  public void createType(IProgressMonitor monitor) throws CoreException,
+      InterruptedException {
+    super.createType(monitor);
+  }
+
+  @Override
+  protected void createTypeMembers(final IType newType, ImportsManager imports,
+      final IProgressMonitor monitor) throws CoreException {
+    super.createTypeMembers(newType, imports, monitor);
+    imports.addImport("org.apache.hadoop.fs.Path");
+    imports.addImport("org.apache.hadoop.io.Text");
+    imports.addImport("org.apache.hadoop.io.IntWritable");
+    imports.addImport("org.apache.hadoop.mapred.JobClient");
+    imports.addImport("org.apache.hadoop.mapred.JobConf");
+    imports.addImport("org.apache.hadoop.mapred.Reducer");
+    imports.addImport("org.apache.hadoop.mapred.Mapper");
+
+    /**
+     * TODO(jz) - move most code out of the runnable
+     */
+    getContainer().getShell().getDisplay().syncExec(new Runnable() {
+      public void run() {
+
+        String method = "public static void main(String[] args) {\n JobClient client = new JobClient();";
+        method += "JobConf conf = new JobConf("
+            + newType.getFullyQualifiedName() + ".class);\n\n";
+
+        method += "// TODO: specify output types\nconf.setOutputKeyClass(Text.class);\nconf.setOutputValueClass(IntWritable.class);\n\n";
+
+        method += "// TODO: specify input and output DIRECTORIES (not files)\nconf.setInputPath(new Path(\"src\"));\nconf.setOutputPath(new Path(\"out\"));\n\n";
+
+        if (mapperText.getText().length() > 0) {
+          method += "conf.setMapperClass(" + mapperText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a mapper\nconf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);\n\n";
+        }
+        if (reducerText.getText().length() > 0) {
+          method += "conf.setReducerClass(" + reducerText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a reducer\nconf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);\n\n";
+        }
+
+        method += "client.setConf(conf);\n";
+        method += "try {\n\tJobClient.runJob(conf);\n} catch (Exception e) {\n"
+            + "\te.printStackTrace();\n}\n";
+        method += "}\n";
+
+        try {
+          newType.createMethod(method, null, false, monitor);
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+  }
+
+  public void createControl(Composite parent) {
+    // super.createControl(parent);
+
+    initializeDialogUnits(parent);
+    Composite composite = new Composite(parent, SWT.NONE);
+    GridLayout layout = new GridLayout();
+    layout.numColumns = 4;
+    composite.setLayout(layout);
+
+    createContainerControls(composite, 4);
+
+    createPackageControls(composite, 4);
+    createSeparator(composite, 4);
+    createTypeNameControls(composite, 4);
+
+    createSuperClassControls(composite, 4);
+    createSuperInterfacesControls(composite, 4);
+    createSeparator(composite, 4);
+
+    createMapperControls(composite);
+    createReducerControls(composite);
+
+    if (!showContainerSelector) {
+      setPackageFragmentRoot(null, false);
+      setSuperClass("java.lang.Object", false);
+      setSuperInterfaces(new ArrayList<String>(), false);
+    }
+
+    setControl(composite);
+
+    setFocus();
+    handleFieldChanged(CONTAINER);
+
+    // setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+    // setSuperInterfaces(Arrays.asList(new String[]{
+    // "org.apache.hadoop.mapred.Mapper" }), true);
+  }
+
+  @Override
+  protected void handleFieldChanged(String fieldName) {
+    super.handleFieldChanged(fieldName);
+
+    validate();
+  }
+
+  private void validate() {
+    if (showContainerSelector) {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    } else {
+      updateStatus(new IStatus[] { fTypeNameStatus, });
+    }
+  }
+
+  private void createMapperControls(Composite composite) {
+    this.mapperText = createBrowseClassControl(composite, "Ma&pper:",
+        "&Browse...", "org.apache.hadoop.mapred.Mapper", "Mapper Selection");
+  }
+
+  private void createReducerControls(Composite composite) {
+    this.reducerText = createBrowseClassControl(composite, "&Reducer:",
+        "Browse&...", "org.apache.hadoop.mapred.Reducer", "Reducer Selection");
+  }
+
+  private Text createBrowseClassControl(final Composite composite,
+      final String string, String browseButtonLabel,
+      final String baseClassName, final String dialogTitle) {
+    Label label = new Label(composite, SWT.NONE);
+    GridData data = new GridData(GridData.FILL_HORIZONTAL);
+    label.setText(string);
+    label.setLayoutData(data);
+
+    final Text text = new Text(composite, SWT.SINGLE | SWT.BORDER);
+    GridData data2 = new GridData(GridData.FILL_HORIZONTAL);
+    data2.horizontalSpan = 2;
+    text.setLayoutData(data2);
+
+    Button browse = new Button(composite, SWT.NONE);
+    browse.setText(browseButtonLabel);
+    GridData data3 = new GridData(GridData.FILL_HORIZONTAL);
+    browse.setLayoutData(data3);
+    browse.addListener(SWT.Selection, new Listener() {
+      public void handleEvent(Event event) {
+        IType baseType;
+        try {
+          baseType = getPackageFragmentRoot().getJavaProject().findType(
+              baseClassName);
+
+          // edit this to limit the scope
+          SelectionDialog dialog = JavaUI.createTypeDialog(
+              composite.getShell(), new ProgressMonitorDialog(composite
+                  .getShell()), SearchEngine.createHierarchyScope(baseType),
+              IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+
+          dialog.setMessage("&Choose a type:");
+          dialog.setBlockOnOpen(true);
+          dialog.setTitle(dialogTitle);
+          dialog.open();
+
+          if ((dialog.getReturnCode() == Window.OK)
+              && (dialog.getResult().length > 0)) {
+            IType type = (IType) dialog.getResult()[0];
+            text.setText(type.getFullyQualifiedName());
+          }
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+
+    if (!showContainerSelector) {
+      label.setEnabled(false);
+      text.setEnabled(false);
+      browse.setEnabled(false);
+    }
+
+    return text;
+  }
+}
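
Expanded, the template strings in createTypeMembers above generate roughly the following main method in the new driver class (assuming a class named MyDriver and empty Mapper/Reducer fields):

    public static void main(String[] args) {
      JobClient client = new JobClient();
      JobConf conf = new JobConf(MyDriver.class);

      // TODO: specify output types
      conf.setOutputKeyClass(Text.class);
      conf.setOutputValueClass(IntWritable.class);

      // TODO: specify input and output DIRECTORIES (not files)
      conf.setInputPath(new Path("src"));
      conf.setOutputPath(new Path("out"));

      // TODO: specify a mapper
      conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);

      // TODO: specify a reducer
      conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

      client.setConf(conf);
      try {
        JobClient.runJob(conf);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }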

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java
new file mode 100644
index 0000000..13ca6c6
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java
@@ -0,0 +1,411 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.lang.reflect.InvocationTargetException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.preferences.MapReducePreferencePage;
+import org.apache.hadoop.eclipse.preferences.PreferenceConstants;
+import org.eclipse.core.resources.IProject;
+import org.eclipse.core.resources.IProjectDescription;
+import org.eclipse.core.resources.ResourcesPlugin;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IConfigurationElement;
+import org.eclipse.core.runtime.IExecutableExtension;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.NullProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.core.runtime.QualifiedName;
+import org.eclipse.core.runtime.SubProgressMonitor;
+import org.eclipse.jdt.ui.wizards.NewJavaProjectWizardPage;
+import org.eclipse.jface.dialogs.IDialogConstants;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.preference.PreferenceDialog;
+import org.eclipse.jface.preference.PreferenceManager;
+import org.eclipse.jface.preference.PreferenceNode;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.wizard.IWizardPage;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.DirectoryDialog;
+import org.eclipse.swt.widgets.Group;
+import org.eclipse.swt.widgets.Link;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.IWorkbenchWizard;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.dialogs.WizardNewProjectCreationPage;
+import org.eclipse.ui.wizards.newresource.BasicNewProjectResourceWizard;
+
+/**
+ * Wizard for creating a new MapReduce Project
+ * 
+ */
+
+public class NewMapReduceProjectWizard extends Wizard implements
+    IWorkbenchWizard, IExecutableExtension {
+  static Logger log =
+      Logger.getLogger(NewMapReduceProjectWizard.class.getName());
+
+  private HadoopFirstPage firstPage;
+
+  private NewJavaProjectWizardPage javaPage;
+
+  public NewDriverWizardPage newDriverPage;
+
+  private IConfigurationElement config;
+
+  public NewMapReduceProjectWizard() {
+    setWindowTitle("New MapReduce Project Wizard");
+  }
+
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+
+  }
+
+  @Override
+  public boolean canFinish() {
+    return firstPage.isPageComplete() && javaPage.isPageComplete()
+    // && ((!firstPage.generateDriver.getSelection())
+    // || newDriverPage.isPageComplete()
+    ;
+  }
+
+  @Override
+  public IWizardPage getNextPage(IWizardPage page) {
+    // if (page == firstPage
+    // && firstPage.generateDriver.getSelection()
+    // )
+    // {
+    // return newDriverPage; // if "generate mapper" checked, second page is
+    // new driver page
+    // }
+    // else
+    // {
+    IWizardPage answer = super.getNextPage(page);
+    if (answer == newDriverPage) {
+      return null; // don't flip to the new driver page unless "generate
+      // driver" is checked
+    }
+    return answer;
+    // }
+  }
+
+  @Override
+  public IWizardPage getPreviousPage(IWizardPage page) {
+    if (page == newDriverPage) {
+      return firstPage; // newDriverPage, if it appears, is the second
+      // page
+    } else {
+      return super.getPreviousPage(page);
+    }
+  }
+
+  static class HadoopFirstPage extends WizardNewProjectCreationPage
+      implements SelectionListener {
+    public HadoopFirstPage() {
+      super("New Hadoop Project");
+      setImageDescriptor(ImageLibrary.get("wizard.mapreduce.project.new"));
+    }
+
+    private Link openPreferences;
+
+    private Button workspaceHadoop;
+
+    private Button projectHadoop;
+
+    private Text location;
+
+    private Button browse;
+
+    private String path;
+
+    public String currentPath;
+
+    // private Button generateDriver;
+
+    @Override
+    public void createControl(Composite parent) {
+      super.createControl(parent);
+
+      setTitle("MapReduce Project");
+      setDescription("Create a MapReduce project.");
+
+      Group group = new Group((Composite) getControl(), SWT.NONE);
+      group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+      group.setText("Hadoop MapReduce Library Installation Path");
+      GridLayout layout = new GridLayout(3, true);
+      layout.marginLeft =
+          convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
+      layout.marginRight =
+          convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
+      layout.marginTop =
+          convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
+      layout.marginBottom =
+          convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
+      group.setLayout(layout);
+
+      workspaceHadoop = new Button(group, SWT.RADIO);
+      GridData d =
+          new GridData(GridData.BEGINNING, GridData.BEGINNING, false, false);
+      d.horizontalSpan = 2;
+      workspaceHadoop.setLayoutData(d);
+      // workspaceHadoop.setText("Use default workbench Hadoop library
+      // location");
+      workspaceHadoop.setSelection(true);
+
+      updateHadoopDirLabelFromPreferences();
+
+      openPreferences = new Link(group, SWT.NONE);
+      openPreferences
+          .setText("<a>Configure Hadoop install directory...</a>");
+      openPreferences.setLayoutData(new GridData(GridData.END,
+          GridData.CENTER, false, false));
+      openPreferences.addSelectionListener(this);
+
+      projectHadoop = new Button(group, SWT.RADIO);
+      projectHadoop.setLayoutData(new GridData(GridData.BEGINNING,
+          GridData.CENTER, false, false));
+      projectHadoop.setText("Specify Hadoop library location");
+
+      location = new Text(group, SWT.SINGLE | SWT.BORDER);
+      location.setText("");
+      d = new GridData(GridData.END, GridData.CENTER, true, false);
+      d.horizontalSpan = 1;
+      d.widthHint = 250;
+      d.grabExcessHorizontalSpace = true;
+      location.setLayoutData(d);
+      location.setEnabled(false);
+
+      browse = new Button(group, SWT.NONE);
+      browse.setText("Browse...");
+      browse.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER,
+          false, false));
+      browse.setEnabled(false);
+      browse.addSelectionListener(this);
+
+      projectHadoop.addSelectionListener(this);
+      workspaceHadoop.addSelectionListener(this);
+
+      // generateDriver = new Button((Composite) getControl(), SWT.CHECK);
+      // generateDriver.setText("Generate a MapReduce driver");
+      // generateDriver.addListener(SWT.Selection, new Listener()
+      // {
+      // public void handleEvent(Event event) {
+      // getContainer().updateButtons(); }
+      // });
+    }
+
+    @Override
+    public boolean isPageComplete() {
+      boolean validHadoop = validateHadoopLocation();
+
+      if (!validHadoop && isCurrentPage()) {
+        setErrorMessage("Invalid Hadoop Runtime specified; please click 'Configure Hadoop install directory' or fill in library location input field");
+      } else {
+        setErrorMessage(null);
+      }
+
+      return super.isPageComplete() && validHadoop;
+    }
+
+    private boolean validateHadoopLocation() {
+      FilenameFilter gotHadoopJar = new FilenameFilter() {
+        public boolean accept(File dir, String name) {
+          return (name.startsWith("hadoop") && name.endsWith(".jar")
+              && (name.indexOf("test") == -1) && (name.indexOf("examples") == -1));
+        }
+      };
+
+      if (workspaceHadoop.getSelection()) {
+        this.currentPath = path;
+        return new Path(path).toFile().exists()
+            && (new Path(path).toFile().list(gotHadoopJar).length > 0);
+      } else {
+        this.currentPath = location.getText();
+        File file = new Path(location.getText()).toFile();
+        return file.exists()
+            && (new Path(location.getText()).toFile().list(gotHadoopJar).length > 0);
+      }
+    }
+
+    private void updateHadoopDirLabelFromPreferences() {
+      path =
+          Activator.getDefault().getPreferenceStore().getString(
+              PreferenceConstants.P_PATH);
+
+      if ((path != null) && (path.length() > 0)) {
+        workspaceHadoop.setText("Use default Hadoop");
+      } else {
+        workspaceHadoop.setText("Use default Hadoop (currently not set)");
+      }
+    }
+
+    public void widgetDefaultSelected(SelectionEvent e) {
+    }
+
+    public void widgetSelected(SelectionEvent e) {
+      if (e.getSource() == openPreferences) {
+        PreferenceManager manager = new PreferenceManager();
+        manager.addToRoot(new PreferenceNode(
+            "Hadoop Installation Directory", new MapReducePreferencePage()));
+        PreferenceDialog dialog =
+            new PreferenceDialog(this.getShell(), manager);
+        dialog.create();
+        dialog.setMessage("Select Hadoop Installation Directory");
+        dialog.setBlockOnOpen(true);
+        dialog.open();
+
+        updateHadoopDirLabelFromPreferences();
+      } else if (e.getSource() == browse) {
+        DirectoryDialog dialog = new DirectoryDialog(this.getShell());
+        dialog
+            .setMessage("Select a Hadoop installation directory containing hadoop-X-core.jar");
+        dialog.setText("Select Hadoop Installation Directory");
+        String directory = dialog.open();
+
+        if (directory != null) {
+          location.setText(directory);
+
+          if (!validateHadoopLocation()) {
+            setErrorMessage("No Hadoop jar found in specified directory");
+          } else {
+            setErrorMessage(null);
+          }
+        }
+      } else if (projectHadoop.getSelection()) {
+        location.setEnabled(true);
+        browse.setEnabled(true);
+      } else {
+        location.setEnabled(false);
+        browse.setEnabled(false);
+      }
+
+      getContainer().updateButtons();
+    }
+  }
+
+  @Override
+  public void addPages() {
+    /*
+     * firstPage = new HadoopFirstPage(); addPage(firstPage ); addPage( new
+     * JavaProjectWizardSecondPage(firstPage) );
+     */
+
+    firstPage = new HadoopFirstPage();
+    javaPage =
+        new NewJavaProjectWizardPage(ResourcesPlugin.getWorkspace()
+            .getRoot(), firstPage);
+    // newDriverPage = new NewDriverWizardPage(false);
+    // newDriverPage.setPageComplete(false); // ensure finish button
+    // initially disabled
+    addPage(firstPage);
+    addPage(javaPage);
+
+    // addPage(newDriverPage);
+  }
+
+  @Override
+  public boolean performFinish() {
+    try {
+      PlatformUI.getWorkbench().getProgressService().runInUI(
+          this.getContainer(), new IRunnableWithProgress() {
+            public void run(IProgressMonitor monitor) {
+              try {
+                monitor.beginTask("Create Hadoop Project", 300);
+
+                javaPage.getRunnable().run(
+                    new SubProgressMonitor(monitor, 100));
+
+                // if( firstPage.generateDriver.getSelection())
+                // {
+                // newDriverPage.setPackageFragmentRoot(javaPage.getNewJavaProject().getAllPackageFragmentRoots()[0],
+                // false);
+                // newDriverPage.getRunnable().run(new
+                // SubProgressMonitor(monitor,100));
+                // }
+
+                IProject project =
+                    javaPage.getNewJavaProject().getResource().getProject();
+                IProjectDescription description = project.getDescription();
+                String[] existingNatures = description.getNatureIds();
+                String[] natures = new String[existingNatures.length + 1];
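+                // slot 0 is reserved for the MapReduce nature; existing
+                // natures shift up by one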
+                for (int i = 0; i < existingNatures.length; i++) {
+                  natures[i + 1] = existingNatures[i];
+                }
+
+                natures[0] = MapReduceNature.ID;
+                description.setNatureIds(natures);
+
+                project.setPersistentProperty(new QualifiedName(
+                    Activator.PLUGIN_ID, "hadoop.runtime.path"),
+                    firstPage.currentPath);
+                project.setDescription(description,
+                    new NullProgressMonitor());
+
+                String[] natureIds = project.getDescription().getNatureIds();
+                for (int i = 0; i < natureIds.length; i++) {
+                  log.fine("Nature id # " + i + " > " + natureIds[i]);
+                }
+
+                monitor.worked(100);
+                monitor.done();
+
+                BasicNewProjectResourceWizard.updatePerspective(config);
+              } catch (CoreException e) {
+                log.log(Level.SEVERE, "CoreException thrown.", e);
+              } catch (InvocationTargetException e) {
+                // TODO Auto-generated catch block
+                e.printStackTrace();
+              } catch (InterruptedException e) {
+                // TODO Auto-generated catch block
+                e.printStackTrace();
+              }
+            }
+          }, null);
+    } catch (InvocationTargetException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+
+    return true;
+  }
+
+  public void setInitializationData(IConfigurationElement config,
+      String propertyName, Object data) throws CoreException {
+    this.config = config;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java
new file mode 100644
index 0000000..d3608ad
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Mapper class (a class that runs the Map portion
+ * of a MapReduce job). The class is pre-filled with a template.
+ * 
+ */
+
+public class NewMapperWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private Page page;
+
+  public NewMapperWizard() {
+    setWindowTitle("New Mapper");
+  }
+
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new Page();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  public static class Page extends NewTypeWizardPage {
+    private Button isCreateMapMethod;
+
+    public Page() {
+      super(true, "Mapper");
+
+      setTitle("Mapper");
+      setDescription("Create a new Mapper implementation.");
+      setImageDescriptor(ImageLibrary.get("wizard.mapper.new"));
+    }
+
+    public void setSelection(IStructuredSelection selection) {
+      initContainerPage(getInitialJavaElement(selection));
+      initTypePage(getInitialJavaElement(selection));
+    }
+
+    @Override
+    public void createType(IProgressMonitor monitor) throws CoreException,
+        InterruptedException {
+      super.createType(monitor);
+    }
+
+    @Override
+    protected void createTypeMembers(IType newType, ImportsManager imports,
+        IProgressMonitor monitor) throws CoreException {
+      super.createTypeMembers(newType, imports, monitor);
+      imports.addImport("java.io.IOException");
+      imports.addImport("org.apache.hadoop.io.WritableComparable");
+      imports.addImport("org.apache.hadoop.io.Writable");
+      imports.addImport("org.apache.hadoop.mapred.OutputCollector");
+      imports.addImport("org.apache.hadoop.mapred.Reporter");
+      newType
+          .createMethod(
+              "public void map(WritableComparable key, Writable values, OutputCollector output, Reporter reporter) throws IOException \n{\n}\n",
+              null, false, monitor);
+    }
+
+    public void createControl(Composite parent) {
+      // super.createControl(parent);
+
+      initializeDialogUnits(parent);
+      Composite composite = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout();
+      layout.numColumns = 4;
+      composite.setLayout(layout);
+
+      createContainerControls(composite, 4);
+      createPackageControls(composite, 4);
+      createSeparator(composite, 4);
+      createTypeNameControls(composite, 4);
+      createSuperClassControls(composite, 4);
+      createSuperInterfacesControls(composite, 4);
+      // createSeparator(composite, 4);
+
+      setControl(composite);
+
+      setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+      setSuperInterfaces(Arrays
+          .asList(new String[] { "org.apache.hadoop.mapred.Mapper" }), true);
+
+      setFocus();
+      validate();
+    }
+
+    @Override
+    protected void handleFieldChanged(String fieldName) {
+      super.handleFieldChanged(fieldName);
+
+      validate();
+    }
+
+    private void validate() {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    }
+  }
+
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        openResource((IFile) page.getModifiedResource());
+        selectAndReveal(page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    // guard against a failed type creation (mirrors NewReducerWizard)
+    return (page.getCreatedType() == null) ? null : page.getCreatedType()
+        .getPrimaryElement();
+  }
+
+}
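
Given the superclass, interface, and method stub set above, a freshly generated Mapper looks roughly like this (the class name MyMapper is illustrative):

    public class MyMapper extends MapReduceBase implements Mapper {

      public void map(WritableComparable key, Writable values,
          OutputCollector output, Reporter reporter) throws IOException {
      }
    }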

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java
new file mode 100644
index 0000000..55a2e60
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Reducer class (a class that runs the Reduce
+ * portion of a MapReduce job). The class is pre-filled with a template.
+ * 
+ */
+
+public class NewReducerWizard extends NewElementWizard implements
+    INewWizard, IRunnableWithProgress {
+  private Page page;
+
+  public NewReducerWizard() {
+    setWindowTitle("New Reducer");
+  }
+
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new Page();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  public static class Page extends NewTypeWizardPage {
+    public Page() {
+      super(true, "Reducer");
+
+      setTitle("Reducer");
+      setDescription("Create a new Reducer implementation.");
+      setImageDescriptor(ImageLibrary.get("wizard.reducer.new"));
+    }
+
+    public void setSelection(IStructuredSelection selection) {
+      initContainerPage(getInitialJavaElement(selection));
+      initTypePage(getInitialJavaElement(selection));
+    }
+
+    @Override
+    public void createType(IProgressMonitor monitor) throws CoreException,
+        InterruptedException {
+      super.createType(monitor);
+    }
+
+    @Override
+    protected void createTypeMembers(IType newType, ImportsManager imports,
+        IProgressMonitor monitor) throws CoreException {
+      super.createTypeMembers(newType, imports, monitor);
+      imports.addImport("java.io.IOException");
+      imports.addImport("org.apache.hadoop.io.WritableComparable");
+      imports.addImport("org.apache.hadoop.mapred.OutputCollector");
+      imports.addImport("org.apache.hadoop.mapred.Reporter");
+      imports.addImport("java.util.Iterator");
+      newType
+          .createMethod(
+              "public void reduce(WritableComparable _key, Iterator values, OutputCollector output, Reporter reporter) throws IOException \n{\n"
+                  + "\t// replace KeyType with the real type of your key\n"
+                  + "\tKeyType key = (KeyType) _key;\n\n"
+                  + "\twhile (values.hasNext()) {\n"
+                  + "\t\t// replace ValueType with the real type of your value\n"
+                  + "\t\tValueType value = (ValueType) values.next();\n\n"
+                  + "\t\t// process value\n" + "\t}\n" + "}\n", null, false,
+              monitor);
+    }
+
+    public void createControl(Composite parent) {
+      // super.createControl(parent);
+
+      initializeDialogUnits(parent);
+      Composite composite = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout();
+      layout.numColumns = 4;
+      composite.setLayout(layout);
+
+      createContainerControls(composite, 4);
+      createPackageControls(composite, 4);
+      createSeparator(composite, 4);
+      createTypeNameControls(composite, 4);
+      createSuperClassControls(composite, 4);
+      createSuperInterfacesControls(composite, 4);
+      // createSeparator(composite, 4);
+
+      setControl(composite);
+
+      setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+      setSuperInterfaces(Arrays
+          .asList(new String[] { "org.apache.hadoop.mapred.Reducer" }), true);
+
+      setFocus();
+      validate();
+    }
+
+    @Override
+    protected void handleFieldChanged(String fieldName) {
+      super.handleFieldChanged(fieldName);
+
+      validate();
+    }
+
+    private void validate() {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    }
+  }
+
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return (page.getCreatedType() == null) ? null : page.getCreatedType()
+        .getPrimaryElement();
+  }
+}
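
Likewise, the Reducer wizard produces approximately the stub below; KeyType and ValueType are placeholders from the template that the user must replace before the class compiles (MyReducer is illustrative):

    public class MyReducer extends MapReduceBase implements Reducer {

      public void reduce(WritableComparable _key, Iterator values,
          OutputCollector output, Reporter reporter) throws IOException {
        // replace KeyType with the real type of your key
        KeyType key = (KeyType) _key;

        while (values.hasNext()) {
          // replace ValueType with the real type of your value
          ValueType value = (ValueType) values.next();

          // process value
        }
      }
    }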

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java
new file mode 100644
index 0000000..97dac5a
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.util.logging.Logger;
+
+/**
+ * Class to help with debugging properties
+ */
+public class PropertyTester extends
+    org.eclipse.core.expressions.PropertyTester {
+
+  static Logger log = Logger.getLogger(PropertyTester.class.getName());
+
+  public PropertyTester() {
+  }
+
+  public boolean test(Object receiver, String property, Object[] args,
+      Object expectedValue) {
+    log.fine("Test property " + property + ", " + receiver.getClass());
+
+    // TODO(jz) test for deployability, e.g. whether the module has the
+    // Hadoop nature, instead of always returning true
+    return true;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DFSActionImpl.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DFSActionImpl.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DFSActionImpl.java
new file mode 100644
index 0000000..c8b34c3
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DFSActionImpl.java
@@ -0,0 +1,478 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.dfs.DFSActions;
+import org.apache.hadoop.eclipse.dfs.DFSFile;
+import org.apache.hadoop.eclipse.dfs.DFSFolder;
+import org.apache.hadoop.eclipse.dfs.DFSLocation;
+import org.apache.hadoop.eclipse.dfs.DFSLocationsRoot;
+import org.apache.hadoop.eclipse.dfs.DFSPath;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.PlatformObject;
+import org.eclipse.jface.action.IAction;
+import org.eclipse.jface.dialogs.InputDialog;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.widgets.DirectoryDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.FileDialog;
+import org.eclipse.ui.IObjectActionDelegate;
+import org.eclipse.ui.IPersistableElement;
+import org.eclipse.ui.IStorageEditorInput;
+import org.eclipse.ui.IWorkbenchPart;
+import org.eclipse.ui.PartInitException;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * Actual implementation of DFS actions
+ */
+public class DFSActionImpl implements IObjectActionDelegate {
+
+  private ISelection selection;
+
+  private IWorkbenchPart targetPart;
+
+  /** {@inheritDoc} */
+  public void setActivePart(IAction action, IWorkbenchPart targetPart) {
+    this.targetPart = targetPart;
+  }
+
+  /** {@inheritDoc} */
+  public void run(IAction action) {
+
+    // Ignore non structured selections
+    if (!(this.selection instanceof IStructuredSelection))
+      return;
+
+    // operate on the DFS asynchronously to prevent blocking the main UI
+    final IStructuredSelection ss = (IStructuredSelection) selection;
+    final String actionId = action.getActionDefinitionId();
+    Display.getDefault().asyncExec(new Runnable() {
+      public void run() {
+        try {
+          switch (DFSActions.getById(actionId)) {
+            case DELETE: {
+              delete(ss);
+              break;
+            }
+            case OPEN: {
+              open(ss);
+              break;
+            }
+            case MKDIR: {
+              mkdir(ss);
+              break;
+            }
+            case UPLOAD_FILES: {
+              uploadFilesToDFS(ss);
+              break;
+            }
+            case UPLOAD_DIR: {
+              uploadDirectoryToDFS(ss);
+              break;
+            }
+            case REFRESH: {
+              refresh(ss);
+              break;
+            }
+            case DOWNLOAD: {
+              downloadFromDFS(ss);
+              break;
+            }
+            case RECONNECT: {
+              reconnect(ss);
+              break;
+            }
+            case DISCONNECT: {
+              disconnect(ss);
+              break;
+            }
+            default: {
+              System.err.println("Unhandled DFS action: " + actionId);
+              break;
+            }
+          }
+
+        } catch (Exception e) {
+          e.printStackTrace();
+          MessageDialog.openError(Display.getDefault().getActiveShell(),
+              "DFS Action error",
+              "An error occurred while performing DFS operation: "
+                  + e.getMessage());
+        }
+      }
+    });
+  }
+
+  /**
+   * Create a new sub-folder in an existing DFS directory
+   * 
+   * @param selection the selection containing the parent folder
+   */
+  private void mkdir(IStructuredSelection selection) {
+    List<DFSFolder> folders = filterSelection(DFSFolder.class, selection);
+    if (folders.size() >= 1) {
+      DFSFolder folder = folders.get(0);
+      InputDialog dialog =
+          new InputDialog(Display.getCurrent().getActiveShell(),
+              "Create subfolder", "Enter the name of the subfolder", "",
+              null);
+      if (dialog.open() == InputDialog.OK)
+        folder.mkdir(dialog.getValue());
+    }
+  }
+
+  /**
+   * Implement the import action (upload files from the current machine to
+   * HDFS)
+   * 
+   * @param selection the selection containing the target folder
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  private void uploadFilesToDFS(IStructuredSelection selection)
+      throws InvocationTargetException, InterruptedException {
+
+    // Ask the user which files to upload
+    FileDialog dialog =
+        new FileDialog(Display.getCurrent().getActiveShell(), SWT.OPEN
+            | SWT.MULTI);
+    dialog.setText("Select the local files to upload");
+    if (dialog.open() == null)
+      return; // the user canceled the dialog
+
+    List<File> files = new ArrayList<File>();
+    for (String fname : dialog.getFileNames())
+      files.add(new File(dialog.getFilterPath() + File.separator + fname));
+
+    // TODO enable upload command only when selection is exactly one folder
+    List<DFSFolder> folders = filterSelection(DFSFolder.class, selection);
+    if (folders.size() >= 1)
+      uploadToDFS(folders.get(0), files);
+  }
+
+  /**
+   * Implement the import action (upload directory from the current machine
+   * to HDFS)
+   * 
+   * @param selection the selection containing the target folder
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  private void uploadDirectoryToDFS(IStructuredSelection selection)
+      throws InvocationTargetException, InterruptedException {
+
+    // Ask the user which local directory to upload
+    DirectoryDialog dialog =
+        new DirectoryDialog(Display.getCurrent().getActiveShell());
+    dialog.setText("Select the local directory to upload");
+
+    String dirName = dialog.open();
+    if (dirName == null)
+      return; // the user canceled the dialog
+
+    final File dir = new File(dirName);
+    List<File> files = new ArrayList<File>();
+    files.add(dir);
+
+    // TODO enable upload command only when selection is exactly one folder
+    final List<DFSFolder> folders =
+        filterSelection(DFSFolder.class, selection);
+    if (folders.size() >= 1)
+      uploadToDFS(folders.get(0), files);
+
+  }
+
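+  /**
+   * Upload the given local files to the given DFS folder, reporting
+   * progress while keeping the UI responsive with a busy cursor.
+   */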
+  private void uploadToDFS(final DFSFolder folder, final List<File> files)
+      throws InvocationTargetException, InterruptedException {
+
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException {
+
+            int work = 0;
+            for (File file : files)
+              work += computeUploadWork(file);
+
+            monitor.beginTask("Uploading files to distributed file system",
+                work);
+
+            for (File file : files) {
+              try {
+                folder.upload(monitor, file);
+
+              } catch (IOException ioe) {
+                ioe.printStackTrace();
+                MessageDialog.openError(null,
+                    "Upload files to distributed file system",
+                    "Upload failed.\n" + ioe);
+              }
+            }
+
+            monitor.done();
+
+            // Update the UI
+            folder.doRefresh();
+          }
+        });
+  }
+
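+  /**
+   * Reconnect every DFS location in the selection.
+   */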
+  private void reconnect(IStructuredSelection selection) {
+    for (DFSLocation location : filterSelection(DFSLocation.class, selection))
+      location.reconnect();
+  }
+
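+  /**
+   * Disconnect the selected locations root from all of its server
+   * locations, then refresh the view. Only applies when the selection is
+   * exactly one DFSLocationsRoot.
+   */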
+  private void disconnect(IStructuredSelection selection) {
+    if (selection.size() != 1)
+      return;
+
+    Object first = selection.getFirstElement();
+    if (!(first instanceof DFSLocationsRoot))
+      return;
+
+    DFSLocationsRoot root = (DFSLocationsRoot) first;
+    root.disconnect();
+    root.refresh();
+  }
+
+  /**
+   * Implements the Download action from HDFS to the current machine
+   * 
+   * @param selection the selection containing the paths to download
+   * @throws InterruptedException
+   * @throws InvocationTargetException
+   */
+  private void downloadFromDFS(IStructuredSelection selection)
+      throws InvocationTargetException, InterruptedException {
+
+    // Ask the user where to put the downloaded files
+    DirectoryDialog dialog =
+        new DirectoryDialog(Display.getCurrent().getActiveShell());
+    dialog.setText("Copy to local directory");
+    dialog.setMessage("Copy the selected files and directories from the "
+        + "distributed filesystem to a local directory");
+    String directory = dialog.open();
+
+    if (directory == null)
+      return;
+
+    final File dir = new File(directory);
+    if (!dir.exists())
+      dir.mkdirs();
+
+    if (!dir.isDirectory()) {
+      MessageDialog.openError(null, "Download to local file system",
+          "Invalid directory location: \"" + dir + "\"");
+      return;
+    }
+
+    final List<DFSPath> paths = filterSelection(DFSPath.class, selection);
+
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException {
+
+            int work = 0;
+            for (DFSPath path : paths)
+              work += path.computeDownloadWork();
+
+            monitor
+                .beginTask("Downloading files to local file system", work);
+
+            for (DFSPath path : paths) {
+              if (monitor.isCanceled())
+                return;
+              try {
+                path.downloadToLocalDirectory(monitor, dir);
+              } catch (Exception e) {
+                // log the failure and continue with the remaining paths
+                e.printStackTrace();
+              }
+            }
+
+            monitor.done();
+          }
+        });
+  }
+
+  /**
+   * Open each selected DFS file in the editor window
+   * 
+   * @param selection the selection containing the files to open
+   * @throws IOException
+   * @throws PartInitException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  private void open(IStructuredSelection selection) throws IOException,
+      PartInitException, InvocationTargetException, InterruptedException {
+
+    for (DFSFile file : filterSelection(DFSFile.class, selection)) {
+
+      IStorageEditorInput editorInput = new DFSFileEditorInput(file);
+      targetPart.getSite().getWorkbenchWindow().getActivePage().openEditor(
+          editorInput, "org.eclipse.ui.DefaultTextEditor");
+    }
+  }
+
+  /**
+   * Refresh every DFS path in the selection
+   * 
+   * @param selection the selection containing the paths to refresh
+   */
+  private void refresh(IStructuredSelection selection) {
+    for (DFSPath path : filterSelection(DFSPath.class, selection))
+      path.refresh();
+  }
+
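+  /**
+   * Ask the user for confirmation, then delete the selected paths from the
+   * distributed file system and refresh their parent folders.
+   */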
+  private void delete(IStructuredSelection selection) {
+    List<DFSPath> list = filterSelection(DFSPath.class, selection);
+    if (list.isEmpty())
+      return;
+
+    StringBuffer msg = new StringBuffer();
+    msg.append("Are you sure you want to delete "
+        + "the following files from the distributed file system?\n");
+    for (DFSPath path : list)
+      msg.append(path.getPath()).append("\n");
+
+    if (MessageDialog.openConfirm(null, "Confirm Delete from DFS", msg
+        .toString())) {
+
+      Set<DFSPath> toRefresh = new HashSet<DFSPath>();
+      for (DFSPath path : list) {
+        path.delete();
+        toRefresh.add(path.getParent());
+      }
+
+      for (DFSPath path : toRefresh) {
+        path.refresh();
+      }
+    }
+  }
+
+  /** {@inheritDoc} */
+  public void selectionChanged(IAction action, ISelection selection) {
+    this.selection = selection;
+  }
+
+  /**
+   * Extract the list of elements of type T from the structured selection
+   * 
+   * @param clazz the class object for T
+   * @param selection the structured selection
+   * @return the list of elements of type T it contains
+   */
+  private static <T> List<T> filterSelection(Class<T> clazz,
+      IStructuredSelection selection) {
+    List<T> list = new ArrayList<T>();
+    for (Object obj : selection.toList()) {
+      if (clazz.isAssignableFrom(obj.getClass())) {
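+        // the isAssignableFrom() check above guarantees this cast is safe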
+        list.add((T) obj);
+      }
+    }
+    return list;
+  }
+
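+  /**
+   * Rough estimate of the progress work for an upload: one unit per file
+   * or directory, plus one unit per kilobyte of file content.
+   */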
+  private static int computeUploadWork(File file) {
+    if (file.isDirectory()) {
+      int contentWork = 1;
+      File[] children = file.listFiles();
+      if (children != null) // listFiles() returns null on I/O error
+        for (File child : children)
+          contentWork += computeUploadWork(child);
+      return contentWork;
+
+    } else if (file.isFile()) {
+      return 1 + (int) (file.length() / 1024);
+
+    } else {
+      return 0;
+    }
+  }
+
+}
+
+/**
+ * Adapter to allow the viewing of a DfsFile in the Editor window
+ */
+class DFSFileEditorInput extends PlatformObject implements
+    IStorageEditorInput {
+
+  private DFSFile file;
+
+  /**
+   * Constructor
+   * 
+   * @param file
+   */
+  DFSFileEditorInput(DFSFile file) {
+    this.file = file;
+  }
+
+  /** {@inheritDoc} */
+  public String getToolTipText() {
+    return file.toDetailedString();
+  }
+
+  /** {@inheritDoc} */
+  public IPersistableElement getPersistable() {
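+    // DFS file inputs are not persisted across workbench sessions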
+    return null;
+  }
+
+  /** {@inheritDoc} */
+  public String getName() {
+    return file.toString();
+  }
+
+  /** {@inheritDoc} */
+  public ImageDescriptor getImageDescriptor() {
+    return ImageLibrary.get("dfs.file.editor");
+  }
+
+  /** {@inheritDoc} */
+  public boolean exists() {
+    return true;
+  }
+
+  /** {@inheritDoc} */
+  public IStorage getStorage() throws CoreException {
+    return file.getIStorage();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java
new file mode 100644
index 0000000..cdfbe93
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
+import org.apache.hadoop.eclipse.view.servers.ServerView;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+/**
+ * Editing server properties action
+ */
+public class EditLocationAction extends Action {
+
+  private ServerView serverView;
+
+  public EditLocationAction(ServerView serverView) {
+    this.serverView = serverView;
+
+    setText("Edit Hadoop location...");
+    setImageDescriptor(ImageLibrary.get("server.view.action.location.edit"));
+  }
+
+  @Override
+  public void run() {
+
+    final HadoopServer server = serverView.getSelectedServer();
+    if (server == null)
+      return;
+
+    WizardDialog dialog = new WizardDialog(null, new Wizard() {
+      private HadoopLocationWizard page = new HadoopLocationWizard(server);
+
+      @Override
+      public void addPages() {
+        super.addPages();
+        setWindowTitle("Edit Hadoop location...");
+        addPage(page);
+      }
+
+      @Override
+      public boolean performFinish() {
+        page.performFinish();
+        return true;
+      }
+    });
+
+    dialog.create();
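+    // block in open() until the user closes the wizard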
+    dialog.setBlockOnOpen(true);
+    dialog.open();
+
+    super.run();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java
new file mode 100644
index 0000000..5db0bc5
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+
+/**
+ * Action corresponding to creating a new Hadoop location.
+ */
+
+public class NewLocationAction extends Action {
+  public NewLocationAction() {
+    setText("New Hadoop location...");
+    setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
+  }
+
+  @Override
+  public void run() {
+    WizardDialog dialog = new WizardDialog(null, new Wizard() {
+      private HadoopLocationWizard page = new HadoopLocationWizard();
+
+      @Override
+      public void addPages() {
+        super.addPages();
+        setWindowTitle("New Hadoop location...");
+        addPage(page);
+      }
+
+      @Override
+      public boolean performFinish() {
+        page.performFinish();
+        return true;
+      }
+
+    });
+
+    dialog.create();
+    dialog.setBlockOnOpen(true);
+    dialog.open();
+
+    super.run();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
new file mode 100644
index 0000000..cc1f9ec
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.NewDriverWizard;
+import org.apache.hadoop.eclipse.NewMapperWizard;
+import org.apache.hadoop.eclipse.NewReducerWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.viewers.StructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.cheatsheets.ICheatSheetAction;
+import org.eclipse.ui.cheatsheets.ICheatSheetManager;
+
+
+/**
+ * Action to open the wizard for creating a new MapReduce class (Mapper, Reducer or Driver).
+ */
+
+public class OpenNewMRClassWizardAction extends Action implements
+    ICheatSheetAction {
+
+  static Logger log = Logger.getLogger(OpenNewMRClassWizardAction.class
+      .getName());
+
+  public void run(String[] params, ICheatSheetManager manager) {
+
+    if ((params != null) && (params.length > 0)) {
+      IWorkbench workbench = PlatformUI.getWorkbench();
+      INewWizard wizard = getWizard(params[0]);
+      if (wizard == null)
+        return; // unknown wizard type; already logged by getWizard()
+      wizard.init(workbench, new StructuredSelection());
+      WizardDialog dialog = new WizardDialog(PlatformUI.getWorkbench()
+          .getActiveWorkbenchWindow().getShell(), wizard);
+      dialog.create();
+      dialog.open();
+
+      // did the wizard succeed?
+      notifyResult(dialog.getReturnCode() == Window.OK);
+    }
+  }
+
+  private INewWizard getWizard(String typeName) {
+    if (typeName.equals("Mapper")) {
+      return new NewMapperWizard();
+    } else if (typeName.equals("Reducer")) {
+      return new NewReducerWizard();
+    } else if (typeName.equals("Driver")) {
+      return new NewDriverWizard();
+    } else {
+      log.severe("Invalid Wizard requested");
+      return null;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
----------------------------------------------------------------------
diff --git a/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
new file mode 100644
index 0000000..c7fde10
--- /dev/null
+++ b/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.NewMapReduceProjectWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.viewers.StructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Shell;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * Action to open a new Map/Reduce project.
+ */
+
+public class OpenNewMRProjectAction extends Action {
+
+  @Override
+  public void run() {
+    IWorkbench workbench = PlatformUI.getWorkbench();
+    Shell shell = workbench.getActiveWorkbenchWindow().getShell();
+    NewMapReduceProjectWizard wizard = new NewMapReduceProjectWizard();
+    wizard.init(workbench, new StructuredSelection());
+    WizardDialog dialog = new WizardDialog(shell, wizard);
+    dialog.create();
+    dialog.open();
+    // did the wizard succeed?
+    notifyResult(dialog.getReturnCode() == Window.OK);
+  }
+}