You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by rs...@apache.org on 2014/06/26 10:36:27 UTC

[04/27] HDT-41: Provide existing MR functionality - ported Mapper/Reducer/Partioner/Driver Wizards - ported Image lookup - ported Map-reduce project wizard - using runtimes from specified hadoop location rather as runtime jars packed in plugin - ported '

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
new file mode 100644
index 0000000..4f11128
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
@@ -0,0 +1,925 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.launch;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.apache.hdt.core.launch.ConfProp;
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.jface.dialogs.IMessageProvider;
+import org.eclipse.jface.wizard.WizardPage;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.custom.ScrolledComposite;
+import org.eclipse.swt.events.ModifyEvent;
+import org.eclipse.swt.events.ModifyListener;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Control;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Group;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.TabFolder;
+import org.eclipse.swt.widgets.TabItem;
+import org.eclipse.swt.widgets.Text;
+
+/**
+ * Wizard for editing the settings of a Hadoop location
+ * 
+ * The wizard contains 3 tabs: General, Tunneling and Advanced. It edits
+ * parameters of the location member which either a new location or a copy of an
+ * existing registered location.
+ */
+
+public class HadoopLocationWizard extends WizardPage {
+
+	Image circle;
+
+	/**
+	 * The location effectively edited by the wizard. This location is a copy or
+	 * a new one.
+	 */
+	private AbstractHadoopCluster location;
+
+	/**
+	 * The original location being edited by the wizard (null if we create a new
+	 * instance).
+	 */
+	private AbstractHadoopCluster original;
+
+	/**
+	 * New Hadoop location wizard: edits a brand new, unregistered cluster
+	 * definition (there is no original to update on finish).
+	 */
+	public HadoopLocationWizard() {
+		super("Hadoop Server", "New Hadoop Location", null);
+
+		this.original = null;
+		try {
+			this.location = AbstractHadoopCluster.createCluster();
+			this.location.setLocationName("");
+		} catch (CoreException e) {
+			// Fail fast with context: the previous code only printed the stack
+			// trace, left this.location null, and then dereferenced it on the
+			// very next line (guaranteed NullPointerException).
+			throw new IllegalStateException("Unable to create a new Hadoop cluster definition", e);
+		}
+	}
+
+	/**
+	 * Constructor to edit the parameters of an existing Hadoop server
+	 * 
+	 * @param server
+	 *            the registered location to edit; a working copy is created
+	 *            so the original is only modified in performFinish()
+	 */
+	public HadoopLocationWizard(AbstractHadoopCluster server) {
+		super("Create a new Hadoop location", "Edit Hadoop Location", null);
+		this.original = server;
+		try {
+			this.location = AbstractHadoopCluster.createCluster(server);
+		} catch (CoreException e) {
+			// NOTE(review): on failure this.location stays null and later
+			// wizard callbacks will NPE; consider propagating the error
+			// instead of swallowing it here.
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * Performs any actions appropriate in response to the user having pressed
+	 * the Finish button, or refuse if finishing now is not permitted.
+	 * 
+	 * @return the created or updated Hadoop location
+	 */
+
+	public AbstractHadoopCluster performFinish() {
+		try {
+			if (this.original == null) {
+				// New location
+				// Register the freshly edited location; done on the UI thread
+				// because the registry fires listeners that touch widgets.
+				Display.getDefault().syncExec(new Runnable() {
+					public void run() {
+						ServerRegistry.getInstance().addServer(HadoopLocationWizard.this.location);
+					}
+				});
+				return this.location;
+
+			} else {
+				// Update location
+				// Remember the name the server was registered under before
+				// copying the edited state back (the name may have changed).
+				final String originalName = this.original.getLocationName();
+				this.original.load(this.location);
+
+				Display.getDefault().syncExec(new Runnable() {
+					public void run() {
+						ServerRegistry.getInstance().updateServer(originalName, HadoopLocationWizard.this.location);
+					}
+				});
+				return this.original;
+
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+			// Keep the wizard open and surface the failure to the user.
+			setMessage("Invalid server location values", IMessageProvider.ERROR);
+			return null;
+		}
+	}
+
+	/**
+	 * Validates the current Hadoop location settings (look for Hadoop
+	 * installation directory).
+	 * 
+	 */
+	private void testLocation() {
+		// Placeholder: real validation is not implemented yet, so only a
+		// warning is shown (the "Validate location" button that would call
+		// this is created disabled in createControl()).
+		setMessage("Not implemented yet", IMessageProvider.WARNING);
+	}
+
+	/**
+	 * Location is not complete (and finish button not available) until a host
+	 * name is specified.
+	 * 
+	 * @inheritDoc
+	 */
+	@Override
+	public boolean isPageComplete() {
+
+		// Location name: required, and must be usable as a file name.
+		{
+			String locName = location.getConfProp(ConfProp.PI_LOCATION_NAME);
+			if ((locName == null) || (locName.length() == 0) || locName.contains("/")) {
+
+				setMessage("Bad location name: " + "the location name should not contain " + "any character prohibited in a file name.", WARNING);
+
+				return false;
+			}
+		}
+
+		// Master host: required (the machine that runs the Job tracker).
+		{
+			String master = location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+			if ((master == null) || (master.length() == 0)) {
+
+				setMessage("Bad master host name: " + "the master host name refers to the machine " + "that runs the Job tracker.", WARNING);
+
+				return false;
+			}
+		}
+
+		// Job tracker URI: must look like "host:port" with a valid port.
+		{
+			String jobTracker = location.getConfProp(ConfProp.JOB_TRACKER_URI);
+			// Guard against an unset property: split() would otherwise NPE.
+			boolean ok = (jobTracker != null);
+			if (ok) {
+				String[] strs = jobTracker.split(":");
+				ok = (strs.length == 2);
+				if (ok) {
+					try {
+						int port = Integer.parseInt(strs[1]);
+						ok = (port >= 0) && (port < 65536);
+					} catch (NumberFormatException nfe) {
+						ok = false;
+					}
+				}
+			}
+			if (!ok) {
+				setMessage("The job tracker information (" + ConfProp.JOB_TRACKER_URI.name + ") is invalid. " + "This usually looks like \"host:port\"",
+						WARNING);
+				return false;
+			}
+		}
+
+		// Default FS URI: syntax-checked only. An invalid value shows a
+		// warning but intentionally does not block completion (unchanged
+		// behavior); a null value is skipped since new URI(null) would NPE.
+		{
+			String fsDefaultURI = location.getConfProp(ConfProp.FS_DEFAULT_URI);
+			try {
+				if (fsDefaultURI != null)
+					new URI(fsDefaultURI);
+			} catch (URISyntaxException e) {
+
+				setMessage("The default file system URI is invalid. " + "This usually looks like \"hdfs://host:port/\" " + "or \"file:///dir/\"", WARNING);
+			}
+		}
+
+		setMessage("Define the location of a Hadoop infrastructure " + "for running MapReduce applications.");
+		return true;
+	}
+
+	/**
+	 * Create the wizard
+	 */
+	/* @inheritDoc */
+	public void createControl(Composite parent) {
+		setTitle("Define Hadoop location");
+		setDescription("Define the location of a Hadoop infrastructure " + "for running MapReduce applications.");
+
+		Composite panel = new Composite(parent, SWT.FILL);
+		GridLayout glayout = new GridLayout(2, false);
+		panel.setLayout(glayout);
+
+		// The tab folder (General / Advanced tabs) spans both layout columns;
+		// the two buttons below each take one column.
+		TabMediator mediator = new TabMediator(panel);
+		{
+			GridData gdata = new GridData(GridData.FILL_BOTH);
+			gdata.horizontalSpan = 2;
+			mediator.folder.setLayoutData(gdata);
+		}
+		this.setControl(panel /* mediator.folder */);
+		{
+			// Placeholder: loading a location from a file is not implemented
+			// yet, so the button is created disabled.
+			final Button btn = new Button(panel, SWT.NONE);
+			btn.setText("&Load from file");
+			btn.setEnabled(false);
+			btn.setToolTipText("Not yet implemented");
+			btn.addListener(SWT.Selection, new Listener() {
+				public void handleEvent(Event e) {
+					// TODO
+				}
+			});
+		}
+		{
+			// Placeholder: validation is stubbed out (see testLocation()),
+			// so this button is created disabled as well.
+			final Button validate = new Button(panel, SWT.NONE);
+			validate.setText("&Validate location");
+			validate.setEnabled(false);
+			validate.setToolTipText("Not yet implemented");
+			validate.addListener(SWT.Selection, new Listener() {
+				public void handleEvent(Event e) {
+					testLocation();
+				}
+			});
+		}
+	}
+
+	/**
+	 * Callback implemented by every tab so the TabMediator can broadcast
+	 * property changes to it.
+	 */
+	private interface TabListener {
+		void notifyChange(ConfProp prop, String propValue);
+	}
+
+	/*
+	 * Mediator pattern to keep tabs synchronized with each other and with the
+	 * location state.
+	 */
+
+	private class TabMediator {
+		TabFolder folder;
+
+		// All registered tabs; each is notified of changes it did not originate.
+		private Set<TabListener> tabs = new HashSet<TabListener>();
+
+		TabMediator(Composite parent) {
+			folder = new TabFolder(parent, SWT.NONE);
+			tabs.add(new TabMain(this));
+			tabs.add(new TabAdvanced(this));
+		}
+
+		/**
+		 * Access to current configuration settings
+		 * 
+		 * @param propName
+		 *            the property name
+		 * @return the current property value
+		 */
+		String get(String propName) {
+			return location.getConfProp(propName);
+		}
+
+		String get(ConfProp prop) {
+			return location.getConfProp(prop);
+		}
+
+		/**
+		 * Implements change notifications from any tab: update the location
+		 * state and other tabs
+		 * 
+		 * @param source
+		 *            origin of the notification (one of the tree tabs)
+		 * @param propName
+		 *            modified property
+		 * @param propValue
+		 *            new value
+		 */
+		void notifyChange(TabListener source, final ConfProp prop, final String propValue) {
+			// Ignore notification when no change
+			String oldValue = location.getConfProp(prop);
+			if ((oldValue != null) && oldValue.equals(propValue))
+				return;
+
+			location.setConfProp(prop, propValue);
+			// Page completeness may have changed; refresh the wizard buttons.
+			Display.getDefault().syncExec(new Runnable() {
+				public void run() {
+					getContainer().updateButtons();
+				}
+			});
+
+			this.fireChange(source, prop, propValue);
+
+			/*
+			 * Now we deal with dependencies between settings
+			 */
+			// Snapshot of the current state, used by the dependency rules below.
+			final String jobTrackerHost = location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+			final String jobTrackerPort = location.getConfProp(ConfProp.PI_JOB_TRACKER_PORT);
+			final String nameNodeHost = location.getConfProp(ConfProp.PI_NAME_NODE_HOST);
+			final String nameNodePort = location.getConfProp(ConfProp.PI_NAME_NODE_PORT);
+			// NOTE(review): the equalsIgnoreCase calls below assume these two
+			// properties are never null — verify against ConfProp's defaults.
+			final boolean colocate = location.getConfProp(ConfProp.PI_COLOCATE_MASTERS).equalsIgnoreCase("yes");
+			final String jobTrackerURI = location.getConfProp(ConfProp.JOB_TRACKER_URI);
+			final String fsDefaultURI = location.getConfProp(ConfProp.FS_DEFAULT_URI);
+			final String socksServerURI = location.getConfProp(ConfProp.SOCKS_SERVER);
+			final boolean socksProxyEnable = location.getConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE).equalsIgnoreCase("yes");
+			final String socksProxyHost = location.getConfProp(ConfProp.PI_SOCKS_PROXY_HOST);
+			final String socksProxyPort = location.getConfProp(ConfProp.PI_SOCKS_PROXY_PORT);
+
+			Display.getDefault().syncExec(new Runnable() {
+				public void run() {
+					// Derived updates re-enter notifyChange with a null source
+					// so that every tab, including the originator, is updated.
+					// Recursion terminates via the no-change early return above.
+					switch (prop) {
+					case PI_JOB_TRACKER_HOST: {
+						if (colocate)
+							notifyChange(null, ConfProp.PI_NAME_NODE_HOST, jobTrackerHost);
+						String newJobTrackerURI = String.format("%s:%s", jobTrackerHost, jobTrackerPort);
+						notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
+						break;
+					}
+					case PI_JOB_TRACKER_PORT: {
+						String newJobTrackerURI = String.format("%s:%s", jobTrackerHost, jobTrackerPort);
+						notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
+						break;
+					}
+					case PI_NAME_NODE_HOST: {
+						String newHDFSURI = String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
+						notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
+
+						// Break colocation if someone force the DFS Master
+						if (!colocate && !nameNodeHost.equals(jobTrackerHost))
+							notifyChange(null, ConfProp.PI_COLOCATE_MASTERS, "no");
+						break;
+					}
+					case PI_NAME_NODE_PORT: {
+						String newHDFSURI = String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
+						notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
+						break;
+					}
+					case PI_SOCKS_PROXY_HOST: {
+						String newSocksProxyURI = String.format("%s:%s", socksProxyHost, socksProxyPort);
+						notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
+						break;
+					}
+					case PI_SOCKS_PROXY_PORT: {
+						String newSocksProxyURI = String.format("%s:%s", socksProxyHost, socksProxyPort);
+						notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
+						break;
+					}
+					case JOB_TRACKER_URI: {
+						// Split composite "host:port" back into its two parts.
+						String[] strs = jobTrackerURI.split(":", 2);
+						String host = strs[0];
+						String port = (strs.length == 2) ? strs[1] : "";
+						notifyChange(null, ConfProp.PI_JOB_TRACKER_HOST, host);
+						notifyChange(null, ConfProp.PI_JOB_TRACKER_PORT, port);
+						break;
+					}
+					case FS_DEFAULT_URI: {
+						try {
+							URI uri = new URI(fsDefaultURI);
+							if (uri.getScheme().equals("hdfs")) {
+								String host = uri.getHost();
+								String port = Integer.toString(uri.getPort());
+								notifyChange(null, ConfProp.PI_NAME_NODE_HOST, host);
+								notifyChange(null, ConfProp.PI_NAME_NODE_PORT, port);
+							}
+						} catch (URISyntaxException use) {
+							// Ignore the update!
+						}
+						break;
+					}
+					case SOCKS_SERVER: {
+						String[] strs = socksServerURI.split(":", 2);
+						String host = strs[0];
+						String port = (strs.length == 2) ? strs[1] : "";
+						notifyChange(null, ConfProp.PI_SOCKS_PROXY_HOST, host);
+						notifyChange(null, ConfProp.PI_SOCKS_PROXY_PORT, port);
+						break;
+					}
+					case PI_COLOCATE_MASTERS: {
+						if (colocate)
+							notifyChange(null, ConfProp.PI_NAME_NODE_HOST, jobTrackerHost);
+						break;
+					}
+					case PI_SOCKS_PROXY_ENABLE: {
+						if (socksProxyEnable) {
+							notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT, "org.apache.hadoop.net.SocksSocketFactory");
+						} else {
+							notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT, "org.apache.hadoop.net.StandardSocketFactory");
+						}
+						break;
+					}
+					}
+				}
+			});
+
+		}
+
+		/**
+		 * Change notifications on properties (by name). A property might not be
+		 * reflected as a ConfProp enum. If it is, the notification is forwarded
+		 * to the ConfProp notifyChange method. If not, it is processed here.
+		 * 
+		 * @param source
+		 * @param propName
+		 * @param propValue
+		 */
+		void notifyChange(TabListener source, String propName, String propValue) {
+
+			ConfProp prop = ConfProp.getByName(propName);
+			if (prop != null)
+				notifyChange(source, prop, propValue);
+
+			// The value is also stored under its raw name, whether or not it
+			// maps to a ConfProp constant.
+			location.setConfProp(propName, propValue);
+		}
+
+		/**
+		 * Broadcast a property change to all registered tabs. If a tab is
+		 * identified as the source of the change, this tab will not be
+		 * notified.
+		 * 
+		 * @param source
+		 *            the originating tab, or null to notify every tab
+		 * @param prop
+		 * @param value
+		 */
+		private void fireChange(TabListener source, ConfProp prop, String value) {
+			for (TabListener tab : tabs) {
+				if (tab != source)
+					tab.notifyChange(prop, value);
+			}
+		}
+
+	}
+
+	/**
+	 * Create a SWT Text component for the given {@link ConfProp} text
+	 * configuration property.
+	 * 
+	 * @param listener
+	 *            modify listener notified when the user edits the field
+	 * @param parent
+	 *            SWT parent container
+	 * @param prop
+	 *            property edited by the field; stored in the widget's
+	 *            "hProp" data slot so listeners can recover it
+	 * @return the text field, pre-filled with the property's current value
+	 */
+	private Text createConfText(ModifyListener listener, Composite parent, ConfProp prop) {
+
+		Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
+		GridData data = new GridData(GridData.FILL_HORIZONTAL);
+		text.setLayoutData(data);
+		text.setData("hProp", prop);
+		text.setText(location.getConfProp(prop));
+		text.addModifyListener(listener);
+
+		return text;
+	}
+
+	/**
+	 * Create a SWT Checked Button component for the given {@link ConfProp}
+	 * boolean configuration property.
+	 * 
+	 * @param listener
+	 *            selection listener notified when the user toggles the box
+	 * @param parent
+	 *            SWT parent container
+	 * @param prop
+	 *            property edited by the checkbox ("yes"/"no" valued); stored
+	 *            in the widget's "hProp" data slot
+	 * @param text
+	 *            checkbox label
+	 * @return the checkbox, checked iff the property currently equals "yes"
+	 */
+	private Button createConfCheckButton(SelectionListener listener, Composite parent, ConfProp prop, String text) {
+
+		Button button = new Button(parent, SWT.CHECK);
+		button.setText(text);
+		button.setData("hProp", prop);
+		button.setSelection(location.getConfProp(prop).equalsIgnoreCase("yes"));
+		button.addSelectionListener(listener);
+
+		return button;
+	}
+
+	/**
+	 * Create editor entry for the given configuration property. The editor is a
+	 * couple (Label, Text).
+	 * 
+	 * @param listener
+	 *            the listener to trigger on property change
+	 * @param parent
+	 *            the SWT parent container
+	 * @param prop
+	 *            the property to create an editor for
+	 * @param labelText
+	 *            a label (null will defaults to the property name)
+	 * 
+	 * @return a SWT Text field
+	 */
+	private Text createConfLabelText(ModifyListener listener, Composite parent, ConfProp prop, String labelText) {
+
+		Label label = new Label(parent, SWT.NONE);
+		if (labelText == null)
+			labelText = prop.name;
+		label.setText(labelText);
+
+		// The label occupies the first grid column; the field the second.
+		return createConfText(listener, parent, prop);
+	}
+
+	/**
+	 * Create an editor entry for the given configuration name
+	 * 
+	 * @param listener
+	 *            the listener to trigger on property change
+	 * @param parent
+	 *            the SWT parent container
+	 * @param propName
+	 *            the name of the property to create an editor for
+	 * @param labelText
+	 *            a label (null will defaults to the property name)
+	 * 
+	 * @return a SWT Text field
+	 */
+	private Text createConfNameEditor(ModifyListener listener, Composite parent, String propName, String labelText) {
+
+		// Names that map to a ConfProp constant get the typed editor, which
+		// stores the enum in the "hProp" data slot.
+		{
+			ConfProp prop = ConfProp.getByName(propName);
+			if (prop != null)
+				return createConfLabelText(listener, parent, prop, labelText);
+		}
+
+		// Otherwise build an untyped editor keyed by name ("hPropName" slot).
+		Label label = new Label(parent, SWT.NONE);
+		if (labelText == null)
+			labelText = propName;
+		label.setText(labelText);
+
+		Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
+		GridData data = new GridData(GridData.FILL_HORIZONTAL);
+		text.setLayoutData(data);
+		text.setData("hPropName", propName);
+		text.setText(location.getConfProp(propName));
+		text.addModifyListener(listener);
+
+		return text;
+	}
+
+	/**
+	 * Main parameters of the Hadoop location: <li>host and port of the
+	 * Map/Reduce master (Job tracker) <li>host and port of the DFS master (Name
+	 * node) <li>SOCKS proxy
+	 */
+	private class TabMain implements TabListener, ModifyListener, SelectionListener {
+
+		// Mediator used to push this tab's edits to the location and other tabs.
+		TabMediator mediator;
+
+		// Editor for PI_LOCATION_NAME.
+		Text locationName;
+
+		// Job tracker host editor (PI_JOB_TRACKER_HOST).
+		Text textJTHost;
+
+		// Name node host editor (PI_NAME_NODE_HOST); disabled when colocated.
+		Text textNNHost;
+
+		// "Use M/R Master host" checkbox (PI_COLOCATE_MASTERS).
+		Button colocateMasters;
+
+		// Job tracker port editor (PI_JOB_TRACKER_PORT).
+		Text textJTPort;
+
+		// Name node port editor (PI_NAME_NODE_PORT).
+		Text textNNPort;
+
+		// Editor for PI_USER_NAME.
+		Text userName;
+
+		// "Enable SOCKS proxy" checkbox (PI_SOCKS_PROXY_ENABLE).
+		Button useSocksProxy;
+
+		// SOCKS proxy host/port editors; disabled when the proxy is off.
+		Text socksProxyHost;
+
+		Text socksProxyPort;
+
+		TabMain(TabMediator mediator) {
+			this.mediator = mediator;
+			TabItem tab = new TabItem(mediator.folder, SWT.NONE);
+			tab.setText("General");
+			tab.setToolTipText("General location parameters");
+			tab.setImage(circle);
+			tab.setControl(createControl(mediator.folder));
+		}
+
+		/**
+		 * Build the "General" tab: location name, M/R master group, DFS
+		 * master group, user name and SOCKS proxy group.
+		 */
+		private Control createControl(Composite parent) {
+
+			Composite panel = new Composite(parent, SWT.FILL);
+			panel.setLayout(new GridLayout(2, false));
+
+			GridData data;
+
+			/*
+			 * Location name
+			 */
+			{
+				Composite subpanel = new Composite(panel, SWT.FILL);
+				subpanel.setLayout(new GridLayout(2, false));
+				data = new GridData();
+				data.horizontalSpan = 2;
+				data.horizontalAlignment = SWT.FILL;
+				subpanel.setLayoutData(data);
+
+				locationName = createConfLabelText(this, subpanel, ConfProp.PI_LOCATION_NAME, "&Location name:");
+			}
+
+			/*
+			 * Map/Reduce group
+			 */
+			{
+				Group groupMR = new Group(panel, SWT.SHADOW_NONE);
+				groupMR.setText("Map/Reduce Master");
+				groupMR.setToolTipText("Address of the Map/Reduce master node " + "(the Job Tracker).");
+				GridLayout layout = new GridLayout(2, false);
+				groupMR.setLayout(layout);
+				data = new GridData();
+				data.verticalAlignment = SWT.FILL;
+				data.horizontalAlignment = SWT.CENTER;
+				data.widthHint = 250;
+				groupMR.setLayoutData(data);
+
+				// Job Tracker host
+				Label label = new Label(groupMR, SWT.NONE);
+				label.setText("Host:");
+				data = new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
+				label.setLayoutData(data);
+
+				textJTHost = createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_HOST);
+				data = new GridData(GridData.FILL, GridData.CENTER, true, true);
+				textJTHost.setLayoutData(data);
+
+				// Job Tracker port
+				label = new Label(groupMR, SWT.NONE);
+				label.setText("Port:");
+				data = new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
+				label.setLayoutData(data);
+
+				textJTPort = createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_PORT);
+				data = new GridData(GridData.FILL, GridData.CENTER, true, true);
+				textJTPort.setLayoutData(data);
+			}
+
+			/*
+			 * DFS group
+			 */
+			{
+				Group groupDFS = new Group(panel, SWT.SHADOW_NONE);
+				groupDFS.setText("DFS Master");
+				groupDFS.setToolTipText("Address of the Distributed FileSystem " + "master node (the Name Node).");
+				GridLayout layout = new GridLayout(2, false);
+				groupDFS.setLayout(layout);
+				data = new GridData();
+				data.horizontalAlignment = SWT.CENTER;
+				data.widthHint = 250;
+				groupDFS.setLayoutData(data);
+
+				colocateMasters = createConfCheckButton(this, groupDFS, ConfProp.PI_COLOCATE_MASTERS, "Use M/R Master host");
+				data = new GridData();
+				data.horizontalSpan = 2;
+				colocateMasters.setLayoutData(data);
+
+				// Name Node host
+				Label label = new Label(groupDFS, SWT.NONE);
+				data = new GridData();
+				label.setText("Host:");
+				label.setLayoutData(data);
+
+				textNNHost = createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_HOST);
+
+				// Name Node port
+				label = new Label(groupDFS, SWT.NONE);
+				data = new GridData();
+				label.setText("Port:");
+				label.setLayoutData(data);
+
+				textNNPort = createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_PORT);
+			}
+
+			{
+				Composite subpanel = new Composite(panel, SWT.FILL);
+				subpanel.setLayout(new GridLayout(2, false));
+				data = new GridData();
+				data.horizontalSpan = 2;
+				data.horizontalAlignment = SWT.FILL;
+				subpanel.setLayoutData(data);
+
+				userName = createConfLabelText(this, subpanel, ConfProp.PI_USER_NAME, "&User name:");
+			}
+
+			// SOCKS proxy group
+			{
+				Group groupSOCKS = new Group(panel, SWT.SHADOW_NONE);
+				groupSOCKS.setText("SOCKS proxy");
+				groupSOCKS.setToolTipText("Address of the SOCKS proxy to use " + "to connect to the infrastructure.");
+				GridLayout layout = new GridLayout(2, false);
+				groupSOCKS.setLayout(layout);
+				data = new GridData();
+				data.horizontalAlignment = SWT.CENTER;
+				data.horizontalSpan = 2;
+				data.widthHint = 250;
+				groupSOCKS.setLayoutData(data);
+
+				useSocksProxy = createConfCheckButton(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_ENABLE, "Enable SOCKS proxy");
+				data = new GridData();
+				data.horizontalSpan = 2;
+				useSocksProxy.setLayoutData(data);
+
+				// SOCKS proxy host
+				Label label = new Label(groupSOCKS, SWT.NONE);
+				data = new GridData();
+				label.setText("Host:");
+				label.setLayoutData(data);
+
+				socksProxyHost = createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_HOST);
+
+				// SOCKS proxy port
+				label = new Label(groupSOCKS, SWT.NONE);
+				data = new GridData();
+				label.setText("Port:");
+				label.setLayoutData(data);
+
+				socksProxyPort = createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_PORT);
+			}
+
+			// Update the state of all widgets according to the current values!
+			reloadConfProp(ConfProp.PI_COLOCATE_MASTERS);
+			reloadConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE);
+			reloadConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+
+			return panel;
+		}
+
+		/**
+		 * Reload the given configuration property value
+		 * 
+		 * @param prop
+		 */
+		private void reloadConfProp(ConfProp prop) {
+			this.notifyChange(prop, location.getConfProp(prop));
+		}
+
+		/**
+		 * Push a property value into the matching widget; properties not
+		 * shown on this tab are silently ignored (no default case).
+		 */
+		public void notifyChange(ConfProp prop, String propValue) {
+			switch (prop) {
+			case PI_JOB_TRACKER_HOST: {
+				textJTHost.setText(propValue);
+				break;
+			}
+			case PI_JOB_TRACKER_PORT: {
+				textJTPort.setText(propValue);
+				break;
+			}
+			case PI_LOCATION_NAME: {
+				locationName.setText(propValue);
+				break;
+			}
+			case PI_USER_NAME: {
+				userName.setText(propValue);
+				break;
+			}
+			case PI_COLOCATE_MASTERS: {
+				if (colocateMasters != null) {
+					boolean colocate = propValue.equalsIgnoreCase("yes");
+					colocateMasters.setSelection(colocate);
+					// Editing the NN host directly is only allowed when the
+					// masters are not colocated.
+					if (textNNHost != null) {
+						textNNHost.setEnabled(!colocate);
+					}
+				}
+				break;
+			}
+			case PI_NAME_NODE_HOST: {
+				textNNHost.setText(propValue);
+				break;
+			}
+			case PI_NAME_NODE_PORT: {
+				textNNPort.setText(propValue);
+				break;
+			}
+			case PI_SOCKS_PROXY_ENABLE: {
+				if (useSocksProxy != null) {
+					boolean useProxy = propValue.equalsIgnoreCase("yes");
+					useSocksProxy.setSelection(useProxy);
+					if (socksProxyHost != null)
+						socksProxyHost.setEnabled(useProxy);
+					if (socksProxyPort != null)
+						socksProxyPort.setEnabled(useProxy);
+				}
+				break;
+			}
+			case PI_SOCKS_PROXY_HOST: {
+				socksProxyHost.setText(propValue);
+				break;
+			}
+			case PI_SOCKS_PROXY_PORT: {
+				socksProxyPort.setText(propValue);
+				break;
+			}
+			}
+		}
+
+		/* @inheritDoc */
+		public void modifyText(ModifyEvent e) {
+			final Text text = (Text) e.widget;
+			final ConfProp prop = (ConfProp) text.getData("hProp");
+			// Passing this tab as the source prevents the mediator from
+			// echoing the change straight back into this widget.
+			Display.getDefault().syncExec(new Runnable() {
+				public void run() {
+					mediator.notifyChange(TabMain.this, prop, text.getText());
+				}
+			});
+		}
+
+		/* @inheritDoc */
+		public void widgetDefaultSelected(SelectionEvent e) {
+			this.widgetSelected(e);
+		}
+
+		/* @inheritDoc */
+		public void widgetSelected(SelectionEvent e) {
+			final Button button = (Button) e.widget;
+			final ConfProp prop = (ConfProp) button.getData("hProp");
+
+			Display.getDefault().syncExec(new Runnable() {
+				public void run() {
+					// We want to receive the update also!
+					mediator.notifyChange(null, prop, button.getSelection() ? "yes" : "no");
+				}
+			});
+		}
+
+	}
+
+	/**
+	 * "Advanced parameters" tab: a scrollable, generic editor listing every
+	 * property of the location's configuration, sorted by property name.
+	 */
+	private class TabAdvanced implements TabListener, ModifyListener {
+		TabMediator mediator;
+
+		private Composite panel;
+
+		// Maps property name -> its editor widget (TreeMap keeps names sorted).
+		private Map<String, Text> textMap = new TreeMap<String, Text>();
+
+		TabAdvanced(TabMediator mediator) {
+			this.mediator = mediator;
+			TabItem tab = new TabItem(mediator.folder, SWT.NONE);
+			tab.setText("Advanced parameters");
+			tab.setToolTipText("Access to advanced Hadoop parameters");
+			tab.setImage(circle);
+			tab.setControl(createControl(mediator.folder));
+
+		}
+
+		/**
+		 * Build one (label, text) editor per configuration entry inside a
+		 * scrolled composite.
+		 */
+		private Control createControl(Composite parent) {
+			ScrolledComposite sc = new ScrolledComposite(parent, SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL);
+
+			panel = new Composite(sc, SWT.NONE);
+			sc.setContent(panel);
+
+			sc.setExpandHorizontal(true);
+			sc.setExpandVertical(true);
+
+			// Provisional minimum size; replaced by the computed size below
+			// once the editors have been created.
+			sc.setMinSize(640, 480);
+
+			GridLayout layout = new GridLayout();
+			layout.numColumns = 2;
+			layout.makeColumnsEqualWidth = false;
+			panel.setLayout(layout);
+			panel.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true, true, 1, 1));
+
+			// Sort by property name
+			SortedMap<String, String> map = new TreeMap<String, String>();
+			Iterator<Entry<String, String>> it = location.getConfiguration();
+			while (it.hasNext()) {
+				Entry<String, String> entry = it.next();
+				map.put(entry.getKey(), entry.getValue());
+			}
+
+			for (Entry<String, String> entry : map.entrySet()) {
+				Text text = createConfNameEditor(this, panel, entry.getKey(), null);
+				textMap.put(entry.getKey(), text);
+			}
+
+			sc.setMinSize(panel.computeSize(SWT.DEFAULT, SWT.DEFAULT));
+
+			return sc;
+		}
+
+		public void notifyChange(ConfProp prop, final String propValue) {
+			// NOTE(review): assumes every ConfProp has an editor in textMap;
+			// a missing entry would make text null and NPE below — verify.
+			Text text = textMap.get(prop.name);
+			text.setText(propValue);
+		}
+
+		/**
+		 * Forward an edit to the mediator, by enum when the widget carries an
+		 * "hProp" ConfProp, otherwise by raw property name ("hPropName").
+		 */
+		public void modifyText(ModifyEvent e) {
+			final Text text = (Text) e.widget;
+			Object hProp = text.getData("hProp");
+			final ConfProp prop = (hProp != null) ? (ConfProp) hProp : null;
+			Object hPropName = text.getData("hPropName");
+			final String propName = (hPropName != null) ? (String) hPropName : null;
+
+			Display.getDefault().syncExec(new Runnable() {
+				public void run() {
+					if (prop != null)
+						mediator.notifyChange(TabAdvanced.this, prop, text.getText());
+					else
+						mediator.notifyChange(TabAdvanced.this, propName, text.getText());
+				}
+			});
+		}
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopServerSelectionListContentProvider.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopServerSelectionListContentProvider.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopServerSelectionListContentProvider.java
new file mode 100644
index 0000000..1f854d0
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopServerSelectionListContentProvider.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.launch;
+
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.eclipse.jface.viewers.IContentProvider;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.IStructuredContentProvider;
+import org.eclipse.jface.viewers.ITableLabelProvider;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.graphics.Image;
+
+/**
+ * Provider that enables selection of a predefined Hadoop server.
+ */
+
+public class HadoopServerSelectionListContentProvider implements
+    IContentProvider, ITableLabelProvider, IStructuredContentProvider {
+  // No state to dispose of.
+  public void dispose() {
+
+  }
+
+  // Elements are always fetched from the ServerRegistry singleton in
+  // getElements(), so input changes need no bookkeeping here.
+  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+
+  }
+
+  // No icons are shown in the server list.
+  public Image getColumnImage(Object element, int columnIndex) {
+    return null;
+  }
+
+  /**
+   * Column 0 shows the location name and column 1 the master host name;
+   * any other element or column falls back to toString().
+   */
+  public String getColumnText(Object element, int columnIndex) {
+    if (element instanceof AbstractHadoopCluster) {
+    	AbstractHadoopCluster location = (AbstractHadoopCluster) element;
+      if (columnIndex == 0) {
+        return location.getLocationName();
+
+      } else if (columnIndex == 1) {
+        return location.getMasterHostName();
+      }
+    }
+
+    return element.toString();
+  }
+
+  // Label-change listeners are not supported: labels never change while
+  // the dialog showing this provider is open.
+  public void addListener(ILabelProviderListener listener) {
+
+  }
+
+  public boolean isLabelProperty(Object element, String property) {
+    return false;
+  }
+
+  public void removeListener(ILabelProviderListener listener) {
+
+  }
+
+  // The table rows: every server currently registered.
+  public Object[] getElements(Object inputElement) {
+    return ServerRegistry.getInstance().getServers().toArray();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/JarModule.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/JarModule.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/JarModule.java
new file mode 100644
index 0000000..a494baa
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/JarModule.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.launch;
+
import java.io.File;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.hdt.core.launch.ErrorMessageDialog;
import org.apache.hdt.core.launch.IJarModule;
import org.apache.hdt.ui.Activator;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.ui.jarpackager.IJarExportRunnable;
import org.eclipse.jdt.ui.jarpackager.JarPackageData;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.PlatformUI;
+
+/**
+ * Methods for interacting with the jar file containing the
+ * Mapper/Reducer/Driver classes for a MapReduce job.
+ */
+
+public class JarModule implements IJarModule {
+
+  static Logger log = Logger.getLogger(JarModule.class.getName());
+
+  private IResource resource;
+
+  private File jarFile;
+
+  public JarModule(IResource resource) {
+    this.resource = resource;
+  }
+
+  public String getName() {
+    return resource.getProject().getName() + "/" + resource.getName();
+  }
+
+  /**
+   * Creates a JAR file containing the given resource (Java class with
+   * main()) and all associated resources
+   * 
+   * @param resource the resource
+   * @return a file designing the created package
+   */
+  public void run(IProgressMonitor monitor) {
+
+    log.fine("Build jar");
+    JarPackageData jarrer = new JarPackageData();
+
+    jarrer.setExportJavaFiles(true);
+    jarrer.setExportClassFiles(true);
+    jarrer.setExportOutputFolders(true);
+    jarrer.setOverwrite(true);
+
+    try {
+      // IJavaProject project =
+      // (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
+
+      // check this is the case before letting this method get called
+      Object element = resource.getAdapter(IJavaElement.class);
+      IType type = ((ICompilationUnit) element).findPrimaryType();
+      jarrer.setManifestMainClass(type);
+
+      // Create a temporary JAR file name
+      File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+      String prefix =
+          String.format("%s_%s-", resource.getProject().getName(), resource
+              .getName());
+      File jarFile = File.createTempFile(prefix, ".jar", baseDir);
+      jarrer.setJarLocation(new Path(jarFile.getAbsolutePath()));
+
+      jarrer.setElements(resource.getProject().members(IResource.FILE));
+      IJarExportRunnable runnable =
+          jarrer.createJarExportRunnable(Display.getDefault()
+              .getActiveShell());
+      runnable.run(monitor);
+
+      this.jarFile = jarFile;
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Allow the retrieval of the resulting JAR file
+   * 
+   * @return the generated JAR file
+   */
+  public File getJarFile() {
+    return this.jarFile;
+  }
+
+  /**
+   * Static way to create a JAR package for the given resource and showing a
+   * progress bar
+   * 
+   * @param resource
+   * @return
+   */
+  public static File createJarPackage(IResource resource) {
+
+    JarModule jarModule = new JarModule(resource);
+    try {
+      PlatformUI.getWorkbench().getProgressService().run(false, true,
+          jarModule);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      return null;
+    }
+
+    File jarFile = jarModule.getJarFile();
+    if (jarFile == null) {
+      ErrorMessageDialog.display("Run on Hadoop",
+          "Unable to create or locate the JAR file for the Job");
+      return null;
+    }
+
+    return jarFile;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/RunOnHadoopWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/RunOnHadoopWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/RunOnHadoopWizard.java
new file mode 100644
index 0000000..fd9f465
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/RunOnHadoopWizard.java
@@ -0,0 +1,346 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.launch;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hdt.core.launch.ErrorMessageDialog;
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.apache.hdt.ui.Activator;
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
+import org.eclipse.jface.viewers.TableViewer;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardPage;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.layout.FillLayout;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Table;
+import org.eclipse.swt.widgets.TableColumn;
+import org.eclipse.swt.widgets.Text;
+
+/**
+ * Wizard for publishing a job to a Hadoop server.
+ */
+
+public class RunOnHadoopWizard extends Wizard {
+
+	private MainWizardPage mainPage;
+
+	private HadoopLocationWizard createNewPage;
+
+	/**
+	 * The file resource (containing a main()) to run on the Hadoop location
+	 */
+	private IFile resource;
+
+	/**
+	 * The launch configuration to update
+	 */
+	private ILaunchConfigurationWorkingCopy iConf;
+
+	private IProgressMonitor progressMonitor;
+
+	public RunOnHadoopWizard(IFile resource, ILaunchConfigurationWorkingCopy iConf) {
+		this.resource = resource;
+		this.iConf = iConf;
+		setForcePreviousAndNextButtons(true);
+		setNeedsProgressMonitor(true);
+		setWindowTitle("Run on Hadoop");
+	}
+
+	/**
+	 * This wizard contains 2 pages: <li>the first one lets the user choose an
+	 * already existing location <li>the second one allows the user to create a
+	 * new location, in case it does not already exist
+	 */
+	/* @inheritDoc */
+	@Override
+	public void addPages() {
+		addPage(this.mainPage = new MainWizardPage());
+		addPage(this.createNewPage = new HadoopLocationWizard());
+	}
+
+	/**
+	 * Performs any actions appropriate in response to the user having pressed
+	 * the Finish button, or refuse if finishing now is not permitted.
+	 */
+	/* @inheritDoc */
+	@Override
+	public boolean performFinish() {
+
+		/*
+		 * Create a new location or get an existing one
+		 */
+		AbstractHadoopCluster location = null;
+		if (mainPage.createNew.getSelection()) {
+			location = createNewPage.performFinish();
+
+		} else if (mainPage.table.getSelection().length == 1) {
+			location = (AbstractHadoopCluster) mainPage.table.getSelection()[0].getData();
+		}
+
+		if (location == null)
+			return false;
+
+		/*
+		 * Get the base directory of the plug-in for storing configurations and
+		 * JARs
+		 */
+		File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+		// Package the Job into a JAR
+		File jarFile = JarModule.createJarPackage(resource);
+		if (jarFile == null) {
+			ErrorMessageDialog.display("Run on Hadoop", "Unable to create or locate the JAR file for the Job");
+			return false;
+		}
+
+		/*
+		 * Generate a temporary Hadoop configuration directory and add it to the
+		 * classpath of the launch configuration
+		 */
+
+		File confDir;
+		try {
+			confDir = File.createTempFile("hadoop-conf-", "", baseDir);
+			confDir.delete();
+			confDir.mkdirs();
+			if (!confDir.isDirectory()) {
+				ErrorMessageDialog.display("Run on Hadoop", "Cannot create temporary directory: " + confDir);
+				return false;
+			}
+		} catch (IOException ioe) {
+			ioe.printStackTrace();
+			return false;
+		}
+		try {
+			location.saveConfiguration(confDir, jarFile.getAbsolutePath());
+		} catch (IOException ioe) {
+			ioe.printStackTrace();
+			return false;
+		}
+		// Setup the Launch class path
+		List<String> classPath;
+		try {
+			classPath = iConf.getAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, new ArrayList());
+			IPath confIPath = new Path(confDir.getAbsolutePath());
+			IRuntimeClasspathEntry cpEntry = JavaRuntime.newArchiveRuntimeClasspathEntry(confIPath);
+			classPath.add(0, cpEntry.getMemento());
+			iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, classPath);
+			iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, mainPage.argumentsText.getText());
+
+		} catch (CoreException e) {
+			e.printStackTrace();
+			return false;
+		}
+
+		// location.runResource(resource, progressMonitor);
+		return true;
+	}
+
+	private void refreshButtons() {
+		getContainer().updateButtons();
+	}
+
+	/**
+	 * Allows finish when an existing server is selected or when a new server
+	 * location is defined
+	 */
+	/* @inheritDoc */
+	@Override
+	public boolean canFinish() {
+		if (mainPage != null)
+			return mainPage.canFinish();
+		return false;
+	}
+
+	/**
+	 * This is the main page of the wizard. It allows the user either to choose
+	 * an already existing location or to indicate he wants to create a new
+	 * location.
+	 */
+	public class MainWizardPage extends WizardPage {
+
+		private Button createNew;
+
+		private Table table;
+		private Text argumentsText;
+
+		private Button chooseExisting;
+
+		public MainWizardPage() {
+			super("Select or define server to run on");
+			setTitle("Select Hadoop location");
+			setDescription("Select a Hadoop location to run on.");
+		}
+
+		/* @inheritDoc */
+		@Override
+		public boolean canFlipToNextPage() {
+			return createNew.getSelection();
+		}
+
+		/* @inheritDoc */
+		public void createControl(Composite parent) {
+			Composite panel = new Composite(parent, SWT.NONE);
+			panel.setLayout(new GridLayout(1, false));
+
+			// Label
+			Label label = new Label(panel, SWT.NONE);
+			label.setText("Select a Hadoop Server to run on.");
+			GridData gData = new GridData(GridData.FILL_BOTH);
+			gData.grabExcessVerticalSpace = false;
+			label.setLayoutData(gData);
+
+			// Create location button
+			createNew = new Button(panel, SWT.RADIO);
+			createNew.setText("Define a new Hadoop server location");
+			createNew.setLayoutData(gData);
+			createNew.addSelectionListener(new SelectionListener() {
+				public void widgetDefaultSelected(SelectionEvent e) {
+				}
+
+				public void widgetSelected(SelectionEvent e) {
+					setPageComplete(true);
+					RunOnHadoopWizard.this.refreshButtons();
+				}
+			});
+			createNew.setSelection(true);
+
+			// Select existing location button
+			chooseExisting = new Button(panel, SWT.RADIO);
+			chooseExisting.setText("Choose an existing server from the list below");
+			chooseExisting.setLayoutData(gData);
+			chooseExisting.addSelectionListener(new SelectionListener() {
+				public void widgetDefaultSelected(SelectionEvent e) {
+				}
+
+				public void widgetSelected(SelectionEvent e) {
+					if (chooseExisting.getSelection() && (table.getSelectionCount() == 0)) {
+						if (table.getItems().length > 0) {
+							table.setSelection(0);
+						}
+					}
+					RunOnHadoopWizard.this.refreshButtons();
+				}
+			});
+
+			// Table of existing locations
+			Composite serverListPanel = new Composite(panel, SWT.FILL);
+			gData = new GridData(GridData.FILL_BOTH);
+			gData.horizontalSpan = 1;
+			serverListPanel.setLayoutData(gData);
+
+			FillLayout layout = new FillLayout();
+			layout.marginHeight = layout.marginWidth = 12;
+			serverListPanel.setLayout(layout);
+
+			table = new Table(serverListPanel, SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION);
+			table.setHeaderVisible(true);
+			table.setLinesVisible(true);
+
+			TableColumn nameColumn = new TableColumn(table, SWT.LEFT);
+			nameColumn.setText("Location");
+			nameColumn.setWidth(450);
+
+			TableColumn hostColumn = new TableColumn(table, SWT.LEFT);
+			hostColumn.setText("Master host name");
+			hostColumn.setWidth(250);
+
+			// If the user select one entry, switch to "chooseExisting"
+			table.addSelectionListener(new SelectionListener() {
+				public void widgetDefaultSelected(SelectionEvent e) {
+				}
+
+				public void widgetSelected(SelectionEvent e) {
+					chooseExisting.setSelection(true);
+					createNew.setSelection(false);
+					setPageComplete(table.getSelectionCount() == 1);
+					RunOnHadoopWizard.this.refreshButtons();
+				}
+			});
+
+			// Label
+			Label argumentsLabel = new Label(panel, SWT.NONE);
+			argumentsLabel.setText("Arguments:");
+			GridData gDataArgumentsLabel = new GridData(GridData.FILL_BOTH);
+			gDataArgumentsLabel.grabExcessVerticalSpace = false;
+			argumentsLabel.setLayoutData(gDataArgumentsLabel);
+
+			// Textbox
+			argumentsText = new Text(panel, SWT.NONE);
+			try {
+				argumentsText.setText(iConf.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, ""));
+			} catch (CoreException e1) {
+				e1.printStackTrace();
+			}
+			GridData gDataArgumentsText = new GridData(GridData.FILL_BOTH);
+			gDataArgumentsText.grabExcessVerticalSpace = false;
+			argumentsText.setLayoutData(gDataArgumentsText);
+
+			TableViewer viewer = new TableViewer(table);
+			HadoopServerSelectionListContentProvider provider = new HadoopServerSelectionListContentProvider();
+			viewer.setContentProvider(provider);
+			viewer.setLabelProvider(provider);
+			viewer.setInput(new Object());
+			// don't care, get from singleton server registry
+
+			this.setControl(panel);
+		}
+
+		/**
+		 * Returns whether this page state allows the Wizard to finish or not
+		 * 
+		 * @return can the wizard finish or not?
+		 */
+		public boolean canFinish() {
+			if (!isControlCreated())
+				return false;
+
+			if (this.createNew.getSelection())
+				return getNextPage().isPageComplete();
+
+			return this.chooseExisting.getSelection();
+		}
+	}
+
+	/**
+	 * @param progressMonitor
+	 */
+	public void setProgressMonitor(IProgressMonitor progressMonitor) {
+		this.progressMonitor = progressMonitor;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/ServerRegistry.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/ServerRegistry.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/ServerRegistry.java
new file mode 100644
index 0000000..785286c
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/ServerRegistry.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.launch;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+import javax.security.auth.login.Configuration;
+
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.apache.hdt.core.launch.IHadoopClusterListener;
+import org.apache.hdt.ui.Activator;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Register of Hadoop locations.
+ * 
+ * Each location corresponds to a Hadoop {@link Configuration} stored as an XML
+ * file in the workspace plug-in configuration directory:
+ * <p>
+ * <tt>
+ * &lt;workspace-dir&gt;/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml
+ * </tt>
+ * 
+ */
+public class ServerRegistry {
+
+	private static final ServerRegistry INSTANCE = new ServerRegistry();
+
+	public static final int SERVER_ADDED = 0;
+
+	public static final int SERVER_REMOVED = 1;
+
+	public static final int SERVER_STATE_CHANGED = 2;
+
+	private final File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+	private final File saveDir = new File(baseDir, "locations");
+
+	private ServerRegistry() {
+		if (saveDir.exists() && !saveDir.isDirectory())
+			saveDir.delete();
+		if (!saveDir.exists())
+			saveDir.mkdirs();
+
+		load();
+	}
+
+	private Map<String, AbstractHadoopCluster> servers;
+
+	private Set<IHadoopClusterListener> listeners = new HashSet<IHadoopClusterListener>();
+
+	public static ServerRegistry getInstance() {
+		return INSTANCE;
+	}
+
+	public synchronized Collection<AbstractHadoopCluster> getServers() {
+		return Collections.unmodifiableCollection(servers.values());
+	}
+
+	/**
+	 * Load all available locations from the workspace configuration directory.
+	 */
+	private synchronized void load() {
+		Map<String, AbstractHadoopCluster> map = new TreeMap<String, AbstractHadoopCluster>();
+		for (File file : saveDir.listFiles()) {
+			try {
+				AbstractHadoopCluster server = AbstractHadoopCluster.createCluster(file);
+				map.put(server.getLocationName(), server);
+
+			} catch (Exception exn) {
+				System.err.println(exn);
+			}
+		}
+		this.servers = map;
+	}
+
+	private synchronized void store() {
+		try {
+			File dir = File.createTempFile("locations", "new", baseDir);
+			dir.delete();
+			dir.mkdirs();
+
+			for (AbstractHadoopCluster server : servers.values()) {
+				server.storeSettingsToFile(new File(dir, server.getLocationName() + ".xml"));
+			}
+
+			FilenameFilter XMLFilter = new FilenameFilter() {
+				public boolean accept(File dir, String name) {
+					String lower = name.toLowerCase();
+					return lower.endsWith(".xml");
+				}
+			};
+
+			File backup = new File(baseDir, "locations.backup");
+			if (backup.exists()) {
+				for (File file : backup.listFiles(XMLFilter))
+					if (!file.delete())
+						throw new IOException("Unable to delete backup location file: " + file);
+				if (!backup.delete())
+					throw new IOException("Unable to delete backup location directory: " + backup);
+			}
+
+			saveDir.renameTo(backup);
+			dir.renameTo(saveDir);
+
+		} catch (IOException ioe) {
+			ioe.printStackTrace();
+			MessageDialog.openError(null, "Saving configuration of Hadoop locations failed", ioe.toString());
+		}
+	}
+
+	public void dispose() {
+		for (AbstractHadoopCluster server : getServers()) {
+			server.dispose();
+		}
+	}
+
+	public synchronized AbstractHadoopCluster getServer(String location) {
+		return servers.get(location);
+	}
+
+	/*
+	 * HadoopServer map listeners
+	 */
+
+	public void addListener(IHadoopClusterListener l) {
+		synchronized (listeners) {
+			listeners.add(l);
+		}
+	}
+
+	public void removeListener(IHadoopClusterListener l) {
+		synchronized (listeners) {
+			listeners.remove(l);
+		}
+	}
+
+	private void fireListeners(AbstractHadoopCluster location, int kind) {
+		synchronized (listeners) {
+			for (IHadoopClusterListener listener : listeners) {
+				listener.serverChanged(location, kind);
+			}
+		}
+	}
+
+	public synchronized void removeServer(AbstractHadoopCluster server) {
+		this.servers.remove(server.getLocationName());
+		store();
+		fireListeners(server, SERVER_REMOVED);
+	}
+
+	public synchronized void addServer(AbstractHadoopCluster server) {
+		this.servers.put(server.getLocationName(), server);
+		store();
+		fireListeners(server, SERVER_ADDED);
+	}
+
+	/**
+	 * Update one Hadoop location
+	 * 
+	 * @param originalName
+	 *            the original location name (might have changed)
+	 * @param server
+	 *            the location
+	 */
+	public synchronized void updateServer(String originalName, AbstractHadoopCluster server) {
+
+		// Update the map if the location name has changed
+		if (!server.getLocationName().equals(originalName)) {
+			servers.remove(originalName);
+			servers.put(server.getLocationName(), server);
+		}
+		store();
+		fireListeners(server, SERVER_STATE_CHANGED);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/ClusterView.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/ClusterView.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/ClusterView.java
new file mode 100644
index 0000000..9952904
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/ClusterView.java
@@ -0,0 +1,450 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.mr;
+
+import java.util.Collection;
+
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.apache.hdt.core.launch.IHadoopClusterListener;
+import org.apache.hdt.core.launch.IHadoopJob;
+import org.apache.hdt.core.launch.IJarModule;
+import org.apache.hdt.core.launch.IJobListener;
+import org.apache.hdt.ui.ImageLibrary;
+import org.apache.hdt.ui.internal.launch.JarModule;
+import org.apache.hdt.ui.internal.launch.ServerRegistry;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.action.IAction;
+import org.eclipse.jface.action.IMenuListener;
+import org.eclipse.jface.action.IMenuManager;
+import org.eclipse.jface.action.MenuManager;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.ISelectionChangedListener;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.viewers.ITableLabelProvider;
+import org.eclipse.jface.viewers.ITreeContentProvider;
+import org.eclipse.jface.viewers.ITreeSelection;
+import org.eclipse.jface.viewers.SelectionChangedEvent;
+import org.eclipse.jface.viewers.TreeViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Menu;
+import org.eclipse.swt.widgets.Tree;
+import org.eclipse.swt.widgets.TreeColumn;
+import org.eclipse.ui.IViewSite;
+import org.eclipse.ui.PartInitException;
+import org.eclipse.ui.actions.ActionFactory;
+import org.eclipse.ui.part.ViewPart;
+
+/**
+ * Map/Reduce locations view: displays all available Hadoop locations and the
+ * Jobs running/finished on these locations
+ */
+public class ClusterView extends ViewPart implements ITreeContentProvider, ITableLabelProvider, IJobListener, IHadoopClusterListener {
+
+	/**
+	 * Deletion action: delete a Hadoop location, kill a running job or remove a
+	 * finished job entry
+	 */
+	class DeleteAction extends Action {
+
+		DeleteAction() {
+			setText("Delete");
+			setImageDescriptor(ImageLibrary.get("server.view.action.delete"));
+		}
+
+		/* @inheritDoc */
+		@Override
+		public void run() {
+			ISelection selection = getViewSite().getSelectionProvider().getSelection();
+			if ((selection != null) && (selection instanceof IStructuredSelection)) {
+				Object selItem = ((IStructuredSelection) selection).getFirstElement();
+
+				if (selItem instanceof AbstractHadoopCluster) {
+					AbstractHadoopCluster location = (AbstractHadoopCluster) selItem;
+					if (MessageDialog.openConfirm(Display.getDefault().getActiveShell(), "Confirm delete Hadoop location",
+							"Do you really want to remove the Hadoop location: " + location.getLocationName())) {
+						ServerRegistry.getInstance().removeServer(location);
+					}
+
+				} else if (selItem instanceof IHadoopJob) {
+
+					// kill the job
+					IHadoopJob job = (IHadoopJob) selItem;
+					if (job.isCompleted()) {
+						// Job already finished, remove the entry
+						job.getLocation().purgeJob(job);
+
+					} else {
+						// Job is running, kill the job?
+						if (MessageDialog.openConfirm(Display.getDefault().getActiveShell(), "Confirm kill running Job",
+								"Do you really want to kill running Job: " + job.getJobID())) {
+							job.kill();
+						}
+					}
+				}
+			}
+		}
+	}
+
+	/**
+	 * This object is the root content for this content provider
+	 */
+	private static final Object CONTENT_ROOT = new Object();
+
+	private final IAction deleteAction = new DeleteAction();
+
+	private final IAction editServerAction = new EditLocationAction(this);
+
+	private final IAction newLocationAction = new NewLocationAction();
+
+	private TreeViewer viewer;
+
+	public ClusterView() {
+	}
+
+	/* @inheritDoc */
+	@Override
+	public void init(IViewSite site) throws PartInitException {
+		super.init(site);
+	}
+
+	/* @inheritDoc */
+	@Override
+	public void dispose() {
+		ServerRegistry.getInstance().removeListener(this);
+	}
+
+	/**
+	 * Creates the columns for the view
+	 */
+	@Override
+	public void createPartControl(Composite parent) {
+		Tree main = new Tree(parent, SWT.SINGLE | SWT.FULL_SELECTION | SWT.H_SCROLL | SWT.V_SCROLL);
+		main.setHeaderVisible(true);
+		main.setLinesVisible(false);
+		main.setLayoutData(new GridData(GridData.FILL_BOTH));
+
+		TreeColumn serverCol = new TreeColumn(main, SWT.SINGLE);
+		serverCol.setText("Location");
+		serverCol.setWidth(300);
+		serverCol.setResizable(true);
+
+		TreeColumn locationCol = new TreeColumn(main, SWT.SINGLE);
+		locationCol.setText("Master node");
+		locationCol.setWidth(185);
+		locationCol.setResizable(true);
+
+		TreeColumn stateCol = new TreeColumn(main, SWT.SINGLE);
+		stateCol.setText("State");
+		stateCol.setWidth(95);
+		stateCol.setResizable(true);
+
+		TreeColumn statusCol = new TreeColumn(main, SWT.SINGLE);
+		statusCol.setText("Status");
+		statusCol.setWidth(300);
+		statusCol.setResizable(true);
+
+		viewer = new TreeViewer(main);
+		viewer.setContentProvider(this);
+		viewer.setLabelProvider(this);
+		viewer.setInput(CONTENT_ROOT); // don't care
+
+		getViewSite().setSelectionProvider(viewer);
+
+		getViewSite().getActionBars().setGlobalActionHandler(ActionFactory.DELETE.getId(), deleteAction);
+		getViewSite().getActionBars().getToolBarManager().add(editServerAction);
+		getViewSite().getActionBars().getToolBarManager().add(newLocationAction);
+
+		createActions();
+		createContextMenu();
+	}
+
+	/**
+	 * Actions
+	 */
+	private void createActions() {
+		/*
+		 * addItemAction = new Action("Add...") { public void run() { addItem();
+		 * } }; addItemAction.setImageDescriptor(ImageLibrary
+		 * .get("server.view.location.new"));
+		 */
+		/*
+		 * deleteItemAction = new Action("Delete") { public void run() {
+		 * deleteItem(); } };
+		 * deleteItemAction.setImageDescriptor(getImageDescriptor
+		 * ("delete.gif"));
+		 * 
+		 * selectAllAction = new Action("Select All") { public void run() {
+		 * selectAll(); } };
+		 */
+		// Add selection listener.
+		viewer.addSelectionChangedListener(new ISelectionChangedListener() {
+			public void selectionChanged(SelectionChangedEvent event) {
+				updateActionEnablement();
+			}
+		});
+	}
+
+	private void addItem() {
+		System.out.printf("ADD ITEM\n");
+	}
+
+	private void updateActionEnablement() {
+		IStructuredSelection sel = (IStructuredSelection) viewer.getSelection();
+		// deleteItemAction.setEnabled(sel.size() > 0);
+	}
+
+	/**
+	 * Contextual menu
+	 */
+	private void createContextMenu() {
+		// Create menu manager.
+		MenuManager menuMgr = new MenuManager();
+		menuMgr.setRemoveAllWhenShown(true);
+		menuMgr.addMenuListener(new IMenuListener() {
+			public void menuAboutToShow(IMenuManager mgr) {
+				fillContextMenu(mgr);
+			}
+		});
+
+		// Create menu.
+		Menu menu = menuMgr.createContextMenu(viewer.getControl());
+		viewer.getControl().setMenu(menu);
+
+		// Register menu for extension.
+		getSite().registerContextMenu(menuMgr, viewer);
+	}
+
+	private void fillContextMenu(IMenuManager mgr) {
+		mgr.add(newLocationAction);
+		mgr.add(editServerAction);
+		mgr.add(deleteAction);
+		/*
+		 * mgr.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
+		 * mgr.add(deleteItemAction); mgr.add(new Separator());
+		 * mgr.add(selectAllAction);
+		 */
+	}
+
+	/* @inheritDoc */
+	@Override
+	public void setFocus() {
+
+	}
+
+	/*
+	 * IHadoopServerListener implementation
+	 */
+
+	/* @inheritDoc */
+	public void serverChanged(AbstractHadoopCluster location, int type) {
+		Display.getDefault().syncExec(new Runnable() {
+			public void run() {
+				ClusterView.this.viewer.refresh();
+			}
+		});
+	}
+
+	/*
+	 * IStructuredContentProvider implementation
+	 */
+
+	/**
+	 * IStructuredContentProvider callback: the viewer input was swapped.
+	 * Subscribes to (or unsubscribes from) ServerRegistry change events so
+	 * that this view listens only while CONTENT_ROOT is the active input.
+	 */
+	public void inputChanged(final Viewer viewer, Object oldInput, Object newInput) {
+		if (oldInput == CONTENT_ROOT)
+			ServerRegistry.getInstance().removeListener(this);
+		if (newInput == CONTENT_ROOT)
+			ServerRegistry.getInstance().addListener(this);
+	}
+
+	/**
+	 * The root elements displayed by this view are the existing Hadoop
+	 * locations
+	 */
+	/* @inheritDoc */
+	public Object[] getElements(Object inputElement) {
+		// Root level of the tree: one entry per registered Hadoop location.
+		return ServerRegistry.getInstance().getServers().toArray();
+	}
+
+	/*
+	 * ITreeStructuredContentProvider implementation
+	 */
+
+	/**
+	 * Each location entry contains a child entry for each job it runs.
+	 * Registers this view as a job listener on the location before returning
+	 * its jobs, so subsequent job changes refresh the tree.
+	 *
+	 * @param parent the tree element whose children are requested
+	 * @return the location's jobs, or an empty array for non-location
+	 *         elements — never null, per the tree content provider contract
+	 */
+	/* @inheritDoc */
+	public Object[] getChildren(Object parent) {
+
+		if (parent instanceof AbstractHadoopCluster) {
+			AbstractHadoopCluster location = (AbstractHadoopCluster) parent;
+			location.addJobListener(this);
+			Collection<? extends IHadoopJob> jobs = location.getJobs();
+			return jobs.toArray();
+		}
+
+		// JFace content providers should return an empty array, not null.
+		return new Object[0];
+	}
+
+	/**
+	 * Returns the parent of a tree element: location entries hang off the
+	 * content root, job entries hang off the location running them, anything
+	 * else has no parent.
+	 */
+	public Object getParent(Object element) {
+		if (element instanceof AbstractHadoopCluster)
+			return CONTENT_ROOT;
+		if (element instanceof IHadoopJob)
+			return ((IHadoopJob) element).getLocation();
+		return null;
+	}
+
+	/* @inheritDoc */
+	public boolean hasChildren(Object element) {
+		/* Only server entries have children (their jobs) */
+		return (element instanceof AbstractHadoopCluster);
+	}
+
+	/*
+	 * ITableLabelProvider implementation
+	 */
+
+	/* @inheritDoc */
+	public void addListener(ILabelProviderListener listener) {
+		// No label-provider listener handling: labels never change lazily.
+	}
+
+	/** No property of an element affects its label presentation. */
+	public boolean isLabelProperty(Object element, String property) {
+		return false;
+	}
+
+	/* @inheritDoc */
+	public void removeListener(ILabelProviderListener listener) {
+		// No label-provider listener handling (none are ever registered).
+	}
+
+	/**
+	 * Returns the icon shown in the first column: a location icon for cluster
+	 * entries, a job icon for job entries, and no icon anywhere else.
+	 */
+	public Image getColumnImage(Object element, int columnIndex) {
+		if (columnIndex != 0)
+			return null;
+		if (element instanceof AbstractHadoopCluster)
+			return ImageLibrary.getImage("server.view.location.entry");
+		if (element instanceof IHadoopJob)
+			return ImageLibrary.getImage("server.view.job.entry");
+		return null;
+	}
+
+	/**
+	 * Returns the text for each column of an entry. Columns: 0 = location
+	 * name / job ID / jar name, 1 = master host or publish message, 2 =
+	 * state, 3 = status. Null returns (e.g. JarModule column 3) are permitted
+	 * by ITableLabelProvider and render as empty cells.
+	 */
+	/* @inheritDoc */
+	public String getColumnText(Object element, int columnIndex) {
+		if (element instanceof AbstractHadoopCluster) {
+			AbstractHadoopCluster server = (AbstractHadoopCluster) element;
+
+			switch (columnIndex) {
+			case 0:
+				return server.getLocationName();
+			case 1:
+				return server.getMasterHostName().toString();
+			case 2:
+				return server.getState();
+			case 3:
+				return "";
+			}
+		} else if (element instanceof IHadoopJob) {
+			IHadoopJob job = (IHadoopJob) element;
+
+			switch (columnIndex) {
+			case 0:
+				return "" + job.getJobID();
+			case 1:
+				return "";
+			case 2:
+				return job.getState();
+			case 3:
+				return job.getStatus();
+			}
+		} else if (element instanceof JarModule) {
+			// Transient entry shown while a jar is being published.
+			JarModule jar = (JarModule) element;
+
+			switch (columnIndex) {
+			case 0:
+				return jar.toString();
+			case 1:
+				return "Publishing jar to server..";
+			case 2:
+				return "";
+			}
+		}
+
+		return null;
+	}
+
+	/*
+	 * IJobListener (Map/Reduce Jobs listener) implementation
+	 */
+
+	/**
+	 * IJobListener callback: a job appeared on a location — refresh the tree.
+	 * NOTE(review): unlike serverChanged, this refresh is not routed through
+	 * Display.syncExec — confirm callers always run on the UI thread.
+	 */
+	public void jobAdded(IHadoopJob job) {
+		viewer.refresh();
+	}
+
+	/** IJobListener callback: a job was removed — refresh the whole tree. */
+	public void jobRemoved(IHadoopJob job) {
+		viewer.refresh();
+	}
+
+	/** IJobListener callback: a job changed — refresh only that job's entry. */
+	public void jobChanged(IHadoopJob job) {
+		viewer.refresh(job);
+	}
+
+	/** Jar publishing finished: refresh to drop the transient JarModule row. */
+	public void publishDone(IJarModule jar) {
+		viewer.refresh();
+	}
+
+	/** Jar publishing started: refresh to show the transient JarModule row. */
+	public void publishStart(IJarModule jar) {
+		viewer.refresh();
+	}
+
+	/*
+	 * Miscellaneous
+	 */
+
+	/**
+	 * Returns the Hadoop location currently selected in the tree.
+	 *
+	 * @return the selected cluster entry, or null when nothing is selected or
+	 *         the selection is not a location entry
+	 */
+	public AbstractHadoopCluster getSelectedServer() {
+		ITreeSelection selection = (ITreeSelection) viewer.getSelection();
+		Object selected = selection.getFirstElement();
+		return (selected instanceof AbstractHadoopCluster) ? (AbstractHadoopCluster) selected : null;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/EditLocationAction.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/EditLocationAction.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/EditLocationAction.java
new file mode 100644
index 0000000..416241a
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/EditLocationAction.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.mr;
+
+import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.apache.hdt.ui.ImageLibrary;
+import org.apache.hdt.ui.internal.launch.HadoopLocationWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+/**
+ * Editing server properties action
+ */
+/**
+ * Action that opens a wizard dialog to edit the properties of the Hadoop
+ * location currently selected in the cluster view. Does nothing when no
+ * location entry is selected.
+ */
+public class EditLocationAction extends Action {
+
+	private ClusterView serverView;
+
+	public EditLocationAction(ClusterView serverView) {
+		this.serverView = serverView;
+		setText("Edit Hadoop location...");
+		setImageDescriptor(ImageLibrary.get("server.view.action.location.edit"));
+	}
+
+	@Override
+	public void run() {
+		final AbstractHadoopCluster server = serverView.getSelectedServer();
+		if (server == null)
+			return;
+
+		// One-page wizard wrapping the shared location-editing page.
+		Wizard wizard = new Wizard() {
+			private HadoopLocationWizard page = new HadoopLocationWizard(server);
+
+			@Override
+			public void addPages() {
+				super.addPages();
+				setWindowTitle("Edit Hadoop location...");
+				addPage(page);
+			}
+
+			@Override
+			public boolean performFinish() {
+				page.performFinish();
+				return true;
+			}
+		};
+
+		// Open modally; the page applies its changes in performFinish().
+		WizardDialog dialog = new WizardDialog(null, wizard);
+		dialog.create();
+		dialog.setBlockOnOpen(true);
+		dialog.open();
+
+		super.run();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizard.java
new file mode 100644
index 0000000..14dcb49
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizard.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.mr;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Driver class (a class that runs a MapReduce job).
+ * 
+ */
+
+public class NewDriverWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  // The single page gathering the driver's name, mapper and reducer.
+  private NewDriverWizardPage page;
+
+  /*
+   * @Override public boolean performFinish() { }
+   */
+  /**
+   * Runnable entry point: creates the driver type configured on the page.
+   * TODO(review): exceptions are only printed to stderr and then swallowed —
+   * they should be surfaced to the user / wizard status instead.
+   */
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  public NewDriverWizard() {
+    setWindowTitle("New MapReduce Driver");
+  }
+
+  /** Creates the wizard page and seeds it from the workbench selection. */
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new NewDriverWizardPage();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  /**
+   * Performs any actions appropriate in response to the user having pressed
+   * the Finish button, or refuse if finishing now is not permitted. On
+   * success, reveals and opens the generated source file.
+   */
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /** Delegates the actual type creation to {@link #run(IProgressMonitor)}. */
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return page.getCreatedType().getPrimaryElement();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizardPage.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizardPage.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizardPage.java
new file mode 100644
index 0000000..4857529
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewDriverWizardPage.java
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hdt.ui.internal.mr;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hdt.ui.ImageLibrary;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * Pre-fills the new MapReduce driver class with a template.
+ * 
+ */
+
+public class NewDriverWizardPage extends NewTypeWizardPage {
+  // NOTE(review): never assigned or read — presumably a leftover from the
+  // Mapper/Reducer wizard pages; confirm it can be deleted.
+  private Button isCreateMapMethod;
+
+  // Holds the fully-qualified reducer class name chosen by the user (may be empty).
+  private Text reducerText;
+
+  // Holds the fully-qualified mapper class name chosen by the user (may be empty).
+  private Text mapperText;
+
+  // When false, the container/package/superclass controls are bypassed and
+  // the mapper/reducer browse controls are disabled (page embedded elsewhere).
+  private final boolean showContainerSelector;
+
+  /** Creates the page with the container selector enabled. */
+  public NewDriverWizardPage() {
+    this(true);
+  }
+
+  /**
+   * @param showContainerSelector whether the source-folder/package/superclass
+   *          controls and the mapper/reducer pickers should be active
+   */
+  public NewDriverWizardPage(boolean showContainerSelector) {
+    super(true, "MapReduce Driver");
+
+    this.showContainerSelector = showContainerSelector;
+    setTitle("MapReduce Driver");
+    setDescription("Create a new MapReduce driver");
+    setImageDescriptor(ImageLibrary.get("wizard.driver.new"));
+  }
+
+  /** Seeds the container and type fields from the workbench selection. */
+  public void setSelection(IStructuredSelection selection) {
+    initContainerPage(getInitialJavaElement(selection));
+    initTypePage(getInitialJavaElement(selection));
+  }
+
+  @Override
+  /**
+   * Creates the new type using the entered field values.
+   */
+  public void createType(IProgressMonitor monitor) throws CoreException,
+      InterruptedException {
+    super.createType(monitor);
+  }
+
+  /**
+   * Adds the Hadoop imports to the generated unit and creates a template
+   * main() method wiring the selected mapper/reducer into a Job. The body is
+   * built on the UI thread (syncExec) because it reads the mapperText and
+   * reducerText widgets.
+   */
+  @Override
+  protected void createTypeMembers(final IType newType, ImportsManager imports,
+      final IProgressMonitor monitor) throws CoreException {
+    super.createTypeMembers(newType, imports, monitor);
+    imports.addImport("org.apache.hadoop.fs.Path");
+    imports.addImport("org.apache.hadoop.io.Text");
+    imports.addImport("org.apache.hadoop.io.IntWritable");
+    imports.addImport("org.apache.hadoop.mapreduce.Job");
+    imports.addImport("org.apache.hadoop.mapreduce.lib.input.FileInputFormat");
+    imports.addImport("org.apache.hadoop.mapreduce.lib.output.FileOutputFormat");
+
+    /**
+     * TODO(jz) - move most code out of the runnable
+     */
+    getContainer().getShell().getDisplay().syncExec(new Runnable() {
+      public void run() {
+
+        String method = "public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {\n";
+        method += "  Job job = new Job();\n\n";
+        method += "  job.setJarByClass( ... );\n\n";
+        method += "  job.setJobName( \"a nice name\"  );\n\n";
+
+        method += "  FileInputFormat.setInputPaths(job, new Path(args[0]));\n";
+        method += "  FileOutputFormat.setOutputPath(job, new Path(args[1]));\n\n";
+        
+        // Wire the user-selected mapper/reducer, or emit TODO placeholders.
+        if (mapperText.getText().length() > 0) {
+          method += "  job.setMapperClass(" + mapperText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "  // TODO: specify a mapper\njob.setMapperClass( ... );\n\n";
+        }
+        if (reducerText.getText().length() > 0) {
+          method += "  job.setReducerClass(" + reducerText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "  // TODO: specify a reducer\njob.setReducerClass( ... );\n\n";
+        }
+
+        method += "  job.setOutputKeyClass(Text.class);\n";
+    	method += "  job.setOutputValueClass(IntWritable.class);\n\n";
+        
+        method += "  boolean success = job.waitForCompletion(true);\n";
+        method += "  System.exit(success ? 0 : 1);\n\t};";
+
+        try {
+          newType.createMethod(method, null, false, monitor);
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+  }
+
+  /**
+   * Builds the page controls: the standard container/package/type rows from
+   * NewTypeWizardPage plus the mapper and reducer picker rows.
+   */
+  public void createControl(Composite parent) {
+    // super.createControl(parent);
+
+    initializeDialogUnits(parent);
+    Composite composite = new Composite(parent, SWT.NONE);
+    GridLayout layout = new GridLayout();
+    layout.numColumns = 4;
+    composite.setLayout(layout);
+
+    createContainerControls(composite, 4);
+
+    createPackageControls(composite, 4);
+    createSeparator(composite, 4);
+    createTypeNameControls(composite, 4);
+
+    createSuperClassControls(composite, 4);
+    createSuperInterfacesControls(composite, 4);
+    createSeparator(composite, 4);
+
+    createMapperControls(composite);
+    createReducerControls(composite);
+
+    if (!showContainerSelector) {
+      setPackageFragmentRoot(null, false);
+      setSuperClass("java.lang.Object", false);
+      // NOTE(review): raw ArrayList — should be parameterized (ArrayList<String>).
+      setSuperInterfaces(new ArrayList(), false);
+    }
+
+    setControl(composite);
+
+    setFocus();
+    handleFieldChanged(CONTAINER);
+
+    // setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+    // setSuperInterfaces(Arrays.asList(new String[]{
+    // "org.apache.hadoop.mapred.Mapper" }), true);
+  }
+
+  @Override
+  protected void handleFieldChanged(String fieldName) {
+    super.handleFieldChanged(fieldName);
+
+    // Revalidate the whole page on every field change.
+    validate();
+  }
+
+  /** Recomputes the page completion status from the relevant field statuses. */
+  private void validate() {
+    if (showContainerSelector) {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    } else {
+      updateStatus(new IStatus[] { fTypeNameStatus, });
+    }
+  }
+
+  private void createMapperControls(Composite composite) {
+    this.mapperText = createBrowseClassControl(composite, "Ma&pper:",
+        "&Browse...", "org.apache.hadoop.mapreduce.Mapper", "Mapper Selection");
+  }
+
+  private void createReducerControls(Composite composite) {
+    this.reducerText = createBrowseClassControl(composite, "&Reducer:",
+        "Browse&...", "org.apache.hadoop.mapreduce.Reducer", "Reducer Selection");
+  }
+
+  /**
+   * Creates a label / text / browse-button row letting the user pick a
+   * subclass of baseClassName via the JDT type-selection dialog. The whole
+   * row is disabled when the container selector is hidden.
+   *
+   * @return the text widget receiving the chosen fully-qualified type name
+   */
+  private Text createBrowseClassControl(final Composite composite,
+      final String string, String browseButtonLabel,
+      final String baseClassName, final String dialogTitle) {
+    Label label = new Label(composite, SWT.NONE);
+    GridData data = new GridData(GridData.FILL_HORIZONTAL);
+    label.setText(string);
+    label.setLayoutData(data);
+
+    final Text text = new Text(composite, SWT.SINGLE | SWT.BORDER);
+    GridData data2 = new GridData(GridData.FILL_HORIZONTAL);
+    data2.horizontalSpan = 2;
+    text.setLayoutData(data2);
+
+    Button browse = new Button(composite, SWT.NONE);
+    browse.setText(browseButtonLabel);
+    GridData data3 = new GridData(GridData.FILL_HORIZONTAL);
+    browse.setLayoutData(data3);
+    browse.addListener(SWT.Selection, new Listener() {
+      public void handleEvent(Event event) {
+        IType baseType;
+        try {
+          baseType = getPackageFragmentRoot().getJavaProject().findType(
+              baseClassName);
+
+          // edit this to limit the scope
+          SelectionDialog dialog = JavaUI.createTypeDialog(
+              composite.getShell(), new ProgressMonitorDialog(composite
+                  .getShell()), SearchEngine.createHierarchyScope(baseType),
+              IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+
+          dialog.setMessage("&Choose a type:");
+          dialog.setBlockOnOpen(true);
+          dialog.setTitle(dialogTitle);
+          dialog.open();
+
+          if ((dialog.getReturnCode() == Window.OK)
+              && (dialog.getResult().length > 0)) {
+            IType type = (IType) dialog.getResult()[0];
+            text.setText(type.getFullyQualifiedName());
+          }
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+
+    if (!showContainerSelector) {
+      label.setEnabled(false);
+      text.setEnabled(false);
+      browse.setEnabled(false);
+    }
+
+    return text;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/29467b54/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewLocationAction.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewLocationAction.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewLocationAction.java
new file mode 100644
index 0000000..20e269e
--- /dev/null
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewLocationAction.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.ui.internal.mr;
+
+import org.apache.hdt.ui.ImageLibrary;
+import org.apache.hdt.ui.internal.launch.HadoopLocationWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+/**
+ * Action corresponding to creating a new MapReduce Server.
+ */
+
+public class NewLocationAction extends Action {
+
+	/** Sets up the label and icon of the "new location" action. */
+	public NewLocationAction() {
+		setText("New Hadoop location...");
+		setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
+	}
+
+	/**
+	 * Opens a modal one-page wizard that creates and registers a new Hadoop
+	 * location; the page applies its changes in performFinish().
+	 */
+	@Override
+	public void run() {
+		Wizard wizard = new Wizard() {
+			private HadoopLocationWizard page = new HadoopLocationWizard();
+
+			@Override
+			public void addPages() {
+				super.addPages();
+				setWindowTitle("New Hadoop location...");
+				addPage(page);
+			}
+
+			@Override
+			public boolean performFinish() {
+				page.performFinish();
+				return true;
+			}
+
+		};
+
+		WizardDialog dialog = new WizardDialog(null, wizard);
+		dialog.create();
+		dialog.setBlockOnOpen(true);
+		dialog.open();
+
+		super.run();
+	}
+}