You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by ad...@apache.org on 2013/01/08 23:26:26 UTC

[9/11] Import of source from Apache Hadoop MapReduce contrib, this is the plugin as it existed in the Hadoop 0.23.4 release.

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/1f/1f8a4660957dcfae64ee09dd9259ef7480d740ae.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/1f/1f8a4660957dcfae64ee09dd9259ef7480d740ae.svn-base b/eclipse-plugin/.svn/pristine/1f/1f8a4660957dcfae64ee09dd9259ef7480d740ae.svn-base
new file mode 100644
index 0000000..cef50a3
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/1f/1f8a4660957dcfae64ee09dd9259ef7480d740ae.svn-base
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse.preferences;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.eclipse.jface.preference.DirectoryFieldEditor;
+import org.eclipse.jface.preference.FieldEditorPreferencePage;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.IWorkbenchPreferencePage;
+
+/**
+ * This class represents a preference page that is contributed to the
+ * Preferences dialog. By sub-classing <tt>FieldEditorPreferencePage</tt>,
+ * we can use the field support built into JFace that allows us to create a
+ * page that is small and knows how to save, restore and apply itself.
+ * 
+ * <p>
+ * This page is used to modify preferences only. They are stored in the
+ * preference store that belongs to the main plug-in class. That way,
+ * preferences can be accessed directly via the preference store.
+ */
+
+public class MapReducePreferencePage extends FieldEditorPreferencePage
+    implements IWorkbenchPreferencePage {
+
+  /**
+   * Builds a GRID-layout field-editor page backed by the plug-in's own
+   * preference store, titled "Hadoop Map/Reduce Tools".
+   */
+  public MapReducePreferencePage() {
+    super(GRID);
+    setPreferenceStore(Activator.getDefault().getPreferenceStore());
+    setTitle("Hadoop Map/Reduce Tools");
+    // setDescription("Hadoop Map/Reduce Preferences");
+  }
+
+  /**
+   * Creates the field editors. Field editors are abstractions of the common
+   * GUI blocks needed to manipulate various types of preferences. Each field
+   * editor knows how to save and restore itself.
+   */
+  @Override
+  public void createFieldEditors() {
+    // Single preference on this page: the local Hadoop installation
+    // directory, persisted under PreferenceConstants.P_PATH.
+    addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
+        "&Hadoop installation directory:", getFieldEditorParent()));
+
+  }
+
+  /* @inheritDoc */
+  public void init(IWorkbench workbench) {
+    // No workbench-specific initialization is required for this page.
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/20/206f77d8fda916a3a5b339c281089cd18718e8a4.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/20/206f77d8fda916a3a5b339c281089cd18718e8a4.svn-base b/eclipse-plugin/.svn/pristine/20/206f77d8fda916a3a5b339c281089cd18718e8a4.svn-base
new file mode 100644
index 0000000..d80deb3
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/20/206f77d8fda916a3a5b339c281089cd18718e8a4.svn-base
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import org.eclipse.debug.ui.IDebugUIConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.ui.IFolderLayout;
+import org.eclipse.ui.IPageLayout;
+import org.eclipse.ui.IPerspectiveFactory;
+import org.eclipse.ui.console.IConsoleConstants;
+
+/**
+ * Creates links to the new MapReduce-based wizards and views for a MapReduce
+ * perspective
+ * 
+ */
+
+public class HadoopPerspectiveFactory implements IPerspectiveFactory {
+
+  /**
+   * Lays out the Map/Reduce perspective: MapReduce and JDT wizard
+   * shortcuts, a Project Explorer folder on the left, the
+   * problem/task/Javadoc/server views at the bottom, and the outline and
+   * cheat-sheet views on the right, plus the debug-launch and Java action
+   * sets.
+   */
+  public void createInitialLayout(IPageLayout layout) {
+    // Shortcuts to the MapReduce-specific creation wizards.
+    layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewDriverWizard");
+    layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewMapperWizard");
+    layout
+        .addNewWizardShortcut("org.apache.hadoop.eclipse.NewReducerWizard");
+
+    // Left folder (20% width): project navigation.
+    IFolderLayout left =
+        layout.createFolder("org.apache.hadoop.eclipse.perspective.left",
+            IPageLayout.LEFT, 0.2f, layout.getEditorArea());
+    left.addView("org.eclipse.ui.navigator.ProjectExplorer");
+
+    // Bottom folder (bottom 30%): diagnostics plus the Hadoop locations
+    // view; console/progress/bookmarks appear only on demand (placeholders).
+    IFolderLayout bottom =
+        layout.createFolder("org.apache.hadoop.eclipse.perspective.bottom",
+            IPageLayout.BOTTOM, 0.7f, layout.getEditorArea());
+    bottom.addView(IPageLayout.ID_PROBLEM_VIEW);
+    bottom.addView(IPageLayout.ID_TASK_LIST);
+    bottom.addView(JavaUI.ID_JAVADOC_VIEW);
+    bottom.addView("org.apache.hadoop.eclipse.view.servers");
+    bottom.addPlaceholder(JavaUI.ID_SOURCE_VIEW);
+    bottom.addPlaceholder(IPageLayout.ID_PROGRESS_VIEW);
+    bottom.addPlaceholder(IConsoleConstants.ID_CONSOLE_VIEW);
+    bottom.addPlaceholder(IPageLayout.ID_BOOKMARKS);
+
+    // Right folder (right 20%): outline and cheat sheets.
+    IFolderLayout right =
+        layout.createFolder("org.apache.hadoop.eclipse.perspective.right",
+            IPageLayout.RIGHT, 0.8f, layout.getEditorArea());
+    right.addView(IPageLayout.ID_OUTLINE);
+    right.addView("org.eclipse.ui.cheatsheets.views.CheatSheetView");
+    // right.addView(layout.ID); .. cheat sheet here
+
+    // Action sets: debugging/launching plus the standard Java tooling sets.
+    layout.addActionSet(IDebugUIConstants.LAUNCH_ACTION_SET);
+    layout.addActionSet(JavaUI.ID_ACTION_SET);
+    layout.addActionSet(JavaUI.ID_CODING_ACTION_SET);
+    layout.addActionSet(JavaUI.ID_ELEMENT_CREATION_ACTION_SET);
+    layout.addActionSet(IPageLayout.ID_NAVIGATE_ACTION_SET);
+    layout.addActionSet(JavaUI.ID_SEARCH_ACTION_SET);
+
+    // Standard JDT and workbench creation wizards, so common "New..."
+    // entries stay available inside this perspective.
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewPackageCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewClassCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewInterfaceCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewEnumCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewAnnotationCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard");
+    layout
+        .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSnippetFileCreationWizard");
+    layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.folder");
+    layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.file");
+    layout
+        .addNewWizardShortcut("org.eclipse.ui.editors.wizards.UntitledTextFileWizard");
+
+    // CheatSheetViewerFactory.createCheatSheetView().setInput("org.apache.hadoop.eclipse.cheatsheet");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/22/22dc81d65c66d34ace2ab7c493046026097ddade.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/22/22dc81d65c66d34ace2ab7c493046026097ddade.svn-base b/eclipse-plugin/.svn/pristine/22/22dc81d65c66d34ace2ab7c493046026097ddade.svn-base
new file mode 100644
index 0000000..389d92e
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/22/22dc81d65c66d34ace2ab7c493046026097ddade.svn-base
@@ -0,0 +1,460 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.view.servers;
+
+import java.util.Collection;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.actions.EditLocationAction;
+import org.apache.hadoop.eclipse.actions.NewLocationAction;
+import org.apache.hadoop.eclipse.server.HadoopJob;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.server.IJobListener;
+import org.apache.hadoop.eclipse.server.JarModule;
+import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.action.IAction;
+import org.eclipse.jface.action.IMenuListener;
+import org.eclipse.jface.action.IMenuManager;
+import org.eclipse.jface.action.MenuManager;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.ISelectionChangedListener;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.viewers.ITableLabelProvider;
+import org.eclipse.jface.viewers.ITreeContentProvider;
+import org.eclipse.jface.viewers.ITreeSelection;
+import org.eclipse.jface.viewers.SelectionChangedEvent;
+import org.eclipse.jface.viewers.TreeViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Menu;
+import org.eclipse.swt.widgets.Tree;
+import org.eclipse.swt.widgets.TreeColumn;
+import org.eclipse.ui.IViewSite;
+import org.eclipse.ui.PartInitException;
+import org.eclipse.ui.actions.ActionFactory;
+import org.eclipse.ui.part.ViewPart;
+
+/**
+ * Map/Reduce locations view: displays all available Hadoop locations and the
+ * Jobs running/finished on these locations
+ */
+public class ServerView extends ViewPart implements ITreeContentProvider,
+    ITableLabelProvider, IJobListener, IHadoopServerListener {
+
+  /**
+   * Deletion action: delete a Hadoop location, kill a running job or remove
+   * a finished job entry
+   */
+  class DeleteAction extends Action {
+
+    DeleteAction() {
+      setText("Delete");
+      setImageDescriptor(ImageLibrary.get("server.view.action.delete"));
+    }
+
+    /* @inheritDoc */
+    @Override
+    public void run() {
+      // Operate on the first element of the view's current selection; the
+      // behavior depends on whether a location or a job is selected.
+      ISelection selection =
+          getViewSite().getSelectionProvider().getSelection();
+      if ((selection != null) && (selection instanceof IStructuredSelection)) {
+        Object selItem =
+            ((IStructuredSelection) selection).getFirstElement();
+
+        if (selItem instanceof HadoopServer) {
+          // Location selected: confirm with the user, then unregister it.
+          HadoopServer location = (HadoopServer) selItem;
+          if (MessageDialog.openConfirm(Display.getDefault()
+              .getActiveShell(), "Confirm delete Hadoop location",
+              "Do you really want to remove the Hadoop location: "
+                  + location.getLocationName())) {
+            ServerRegistry.getInstance().removeServer(location);
+          }
+
+        } else if (selItem instanceof HadoopJob) {
+
+          // kill the job
+          HadoopJob job = (HadoopJob) selItem;
+          if (job.isCompleted()) {
+            // Job already finished, remove the entry
+            job.getLocation().purgeJob(job);
+
+          } else {
+            // Job is running, kill the job?
+            if (MessageDialog.openConfirm(Display.getDefault()
+                .getActiveShell(), "Confirm kill running Job",
+                "Do you really want to kill running Job: " + job.getJobID())) {
+              job.kill();
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * This object is the root content for this content provider
+   */
+  private static final Object CONTENT_ROOT = new Object();
+
+  private final IAction deleteAction = new DeleteAction();
+
+  private final IAction editServerAction = new EditLocationAction(this);
+
+  private final IAction newLocationAction = new NewLocationAction();
+
+  // Tree viewer displaying locations (roots) and their jobs (children).
+  private TreeViewer viewer;
+
+  public ServerView() {
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void init(IViewSite site) throws PartInitException {
+    super.init(site);
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void dispose() {
+    // Stop receiving server registry change notifications; registration
+    // happens in inputChanged() when the viewer input is set.
+    ServerRegistry.getInstance().removeListener(this);
+  }
+
+  /**
+   * Creates the columns for the view
+   */
+  @Override
+  public void createPartControl(Composite parent) {
+    Tree main =
+        new Tree(parent, SWT.SINGLE | SWT.FULL_SELECTION | SWT.H_SCROLL
+            | SWT.V_SCROLL);
+    main.setHeaderVisible(true);
+    main.setLinesVisible(false);
+    main.setLayoutData(new GridData(GridData.FILL_BOTH));
+
+    TreeColumn serverCol = new TreeColumn(main, SWT.SINGLE);
+    serverCol.setText("Location");
+    serverCol.setWidth(300);
+    serverCol.setResizable(true);
+
+    TreeColumn locationCol = new TreeColumn(main, SWT.SINGLE);
+    locationCol.setText("Master node");
+    locationCol.setWidth(185);
+    locationCol.setResizable(true);
+
+    TreeColumn stateCol = new TreeColumn(main, SWT.SINGLE);
+    stateCol.setText("State");
+    stateCol.setWidth(95);
+    stateCol.setResizable(true);
+
+    TreeColumn statusCol = new TreeColumn(main, SWT.SINGLE);
+    statusCol.setText("Status");
+    statusCol.setWidth(300);
+    statusCol.setResizable(true);
+
+    // This view acts as both content and label provider for its own viewer.
+    viewer = new TreeViewer(main);
+    viewer.setContentProvider(this);
+    viewer.setLabelProvider(this);
+    viewer.setInput(CONTENT_ROOT); // don't care
+
+    getViewSite().setSelectionProvider(viewer);
+    
+    // Route the workbench-wide Delete command to our delete action and
+    // expose the edit/new-location actions on the view's toolbar.
+    getViewSite().getActionBars().setGlobalActionHandler(
+        ActionFactory.DELETE.getId(), deleteAction);
+    getViewSite().getActionBars().getToolBarManager().add(editServerAction);
+    getViewSite().getActionBars().getToolBarManager().add(newLocationAction);
+
+    createActions();
+    createContextMenu();
+  }
+
+  /**
+   * Actions
+   */
+  private void createActions() {
+    /*
+     * addItemAction = new Action("Add...") { public void run() { addItem(); } };
+     * addItemAction.setImageDescriptor(ImageLibrary
+     * .get("server.view.location.new"));
+     */
+    /*
+     * deleteItemAction = new Action("Delete") { public void run() {
+     * deleteItem(); } };
+     * deleteItemAction.setImageDescriptor(getImageDescriptor("delete.gif"));
+     * 
+     * selectAllAction = new Action("Select All") { public void run() {
+     * selectAll(); } };
+     */
+    // Add selection listener.
+    viewer.addSelectionChangedListener(new ISelectionChangedListener() {
+      public void selectionChanged(SelectionChangedEvent event) {
+        updateActionEnablement();
+      }
+    });
+  }
+
+  // NOTE(review): private and not invoked from any visible code path; only
+  // referenced by the commented-out addItemAction above.
+  private void addItem() {
+    System.out.printf("ADD ITEM\n");
+  }
+
+  private void updateActionEnablement() {
+    // NOTE(review): 'sel' is currently unused because the enablement update
+    // below is commented out.
+    IStructuredSelection sel = (IStructuredSelection) viewer.getSelection();
+    // deleteItemAction.setEnabled(sel.size() > 0);
+  }
+
+  /**
+   * Contextual menu
+   */
+  private void createContextMenu() {
+    // Create menu manager.
+    MenuManager menuMgr = new MenuManager();
+    menuMgr.setRemoveAllWhenShown(true);
+    menuMgr.addMenuListener(new IMenuListener() {
+      public void menuAboutToShow(IMenuManager mgr) {
+        fillContextMenu(mgr);
+      }
+    });
+
+    // Create menu.
+    Menu menu = menuMgr.createContextMenu(viewer.getControl());
+    viewer.getControl().setMenu(menu);
+
+    // Register menu for extension.
+    getSite().registerContextMenu(menuMgr, viewer);
+  }
+
+  // Populates the context menu on each popup (rebuilt every time because of
+  // setRemoveAllWhenShown above).
+  private void fillContextMenu(IMenuManager mgr) {
+    mgr.add(newLocationAction);
+    mgr.add(editServerAction);
+    mgr.add(deleteAction);
+    /*
+     * mgr.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
+     * mgr.add(deleteItemAction); mgr.add(new Separator());
+     * mgr.add(selectAllAction);
+     */
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void setFocus() {
+
+  }
+
+  /*
+   * IHadoopServerListener implementation
+   */
+
+  /* @inheritDoc */
+  public void serverChanged(HadoopServer location, int type) {
+    // Refresh the whole tree on the UI thread; the location and type
+    // parameters are ignored.
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        ServerView.this.viewer.refresh();
+      }
+    });
+  }
+
+  /*
+   * IStructuredContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public void inputChanged(final Viewer viewer, Object oldInput,
+      Object newInput) {
+    // Track server-registry changes only while CONTENT_ROOT is the input.
+    if (oldInput == CONTENT_ROOT)
+      ServerRegistry.getInstance().removeListener(this);
+    if (newInput == CONTENT_ROOT)
+      ServerRegistry.getInstance().addListener(this);
+  }
+
+  /**
+   * The root elements displayed by this view are the existing Hadoop
+   * locations
+   */
+  /* @inheritDoc */
+  public Object[] getElements(Object inputElement) {
+    return ServerRegistry.getInstance().getServers().toArray();
+  }
+
+  /*
+   * ITreeStructuredContentProvider implementation
+   */
+
+  /**
+   * Each location contains a child entry for each job it runs.
+   */
+  /* @inheritDoc */
+  public Object[] getChildren(Object parent) {
+
+    if (parent instanceof HadoopServer) {
+      // Side effect: subscribes this view to the location's job events so
+      // the tree refreshes as jobs are added/removed/changed.
+      HadoopServer location = (HadoopServer) parent;
+      location.addJobListener(this);
+      Collection<HadoopJob> jobs = location.getJobs();
+      return jobs.toArray();
+    }
+
+    return null;
+  }
+
+  /* @inheritDoc */
+  public Object getParent(Object element) {
+    if (element instanceof HadoopServer) {
+      return CONTENT_ROOT;
+
+    } else if (element instanceof HadoopJob) {
+      return ((HadoopJob) element).getLocation();
+    }
+
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren(Object element) {
+    /* Only server entries have children */
+    return (element instanceof HadoopServer);
+  }
+
+  /*
+   * ITableLabelProvider implementation
+   */
+
+  /* @inheritDoc */
+  public void addListener(ILabelProviderListener listener) {
+    // no listeners handling
+  }
+
+  public boolean isLabelProperty(Object element, String property) {
+    return false;
+  }
+
+  /* @inheritDoc */
+  public void removeListener(ILabelProviderListener listener) {
+    // no listener handling
+  }
+
+  /* @inheritDoc */
+  public Image getColumnImage(Object element, int columnIndex) {
+    // Icons only in the first column; other columns and unknown element
+    // types get no image.
+    if ((columnIndex == 0) && (element instanceof HadoopServer)) {
+      return ImageLibrary.getImage("server.view.location.entry");
+
+    } else if ((columnIndex == 0) && (element instanceof HadoopJob)) {
+      return ImageLibrary.getImage("server.view.job.entry");
+    }
+    return null;
+  }
+
+  /* @inheritDoc */
+  public String getColumnText(Object element, int columnIndex) {
+    // Columns: 0 = Location, 1 = Master node, 2 = State, 3 = Status.
+    // Returns null for unknown element types or out-of-range columns.
+    if (element instanceof HadoopServer) {
+      HadoopServer server = (HadoopServer) element;
+
+      switch (columnIndex) {
+        case 0:
+          return server.getLocationName();
+        case 1:
+          return server.getMasterHostName().toString();
+        case 2:
+          return server.getState();
+        case 3:
+          return "";
+      }
+    } else if (element instanceof HadoopJob) {
+      HadoopJob job = (HadoopJob) element;
+
+      switch (columnIndex) {
+        case 0:
+          return job.getJobID().toString();
+        case 1:
+          return "";
+        case 2:
+          return job.getState().toString();
+        case 3:
+          return job.getStatus();
+      }
+    } else if (element instanceof JarModule) {
+      JarModule jar = (JarModule) element;
+
+      switch (columnIndex) {
+        case 0:
+          return jar.toString();
+        case 1:
+          return "Publishing jar to server..";
+        case 2:
+          return "";
+      }
+    }
+
+    return null;
+  }
+
+  /*
+   * IJobListener (Map/Reduce Jobs listener) implementation
+   */
+
+  /* @inheritDoc */
+  public void jobAdded(HadoopJob job) {
+    viewer.refresh();
+  }
+
+  /* @inheritDoc */
+  public void jobRemoved(HadoopJob job) {
+    viewer.refresh();
+  }
+
+  /* @inheritDoc */
+  public void jobChanged(HadoopJob job) {
+    // Only the affected job's subtree needs refreshing here.
+    viewer.refresh(job);
+  }
+
+  /* @inheritDoc */
+  public void publishDone(JarModule jar) {
+    viewer.refresh();
+  }
+
+  /* @inheritDoc */
+  public void publishStart(JarModule jar) {
+    viewer.refresh();
+  }
+
+  /*
+   * Miscellaneous
+   */
+
+  /**
+   * Return the currently selected server (null if there is no selection or
+   * if the selection is not a server)
+   * 
+   * @return the currently selected server entry
+   */
+  public HadoopServer getSelectedServer() {
+    ITreeSelection selection = (ITreeSelection) viewer.getSelection();
+    Object first = selection.getFirstElement();
+    if (first instanceof HadoopServer) {
+      return (HadoopServer) first;
+    }
+    return null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/23/23b5b6660ef95d6f633d0ac8c715d4029e1b1123.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/23/23b5b6660ef95d6f633d0ac8c715d4029e1b1123.svn-base b/eclipse-plugin/.svn/pristine/23/23b5b6660ef95d6f633d0ac8c715d4029e1b1123.svn-base
new file mode 100644
index 0000000..efc441b
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/23/23b5b6660ef95d6f633d0ac8c715d4029e1b1123.svn-base
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Configuration properties handled by the plug-in for a Hadoop location.
+ * Constants created with {@code internal == true} are plug-in-private and
+ * stored under an "eclipse.plug-in." key prefix; the others map directly
+ * onto Hadoop configuration keys.
+ */
+public enum ConfProp {
+  /**
+   * Property name for the Hadoop location name
+   */
+  PI_LOCATION_NAME(true, "location.name", "New Hadoop location"),
+
+  /**
+   * Property name for the master host name (the Job tracker)
+   */
+  PI_JOB_TRACKER_HOST(true, "jobtracker.host", "localhost"),
+
+  /**
+   * Property name for the DFS master host name (the Name node)
+   */
+  PI_NAME_NODE_HOST(true, "namenode.host", "localhost"),
+
+  /**
+   * Property name for the installation directory on the master node
+   */
+  // PI_INSTALL_DIR(true, "install.dir", "/dir/hadoop-version/"),
+  /**
+   * User name to use for Hadoop operations
+   */
+  PI_USER_NAME(true, "user.name", System.getProperty("user.name",
+      "who are you?")),
+
+  /**
+   * Property name for SOCKS proxy activation
+   */
+  PI_SOCKS_PROXY_ENABLE(true, "socks.proxy.enable", "no"),
+
+  /**
+   * Property name for the SOCKS proxy host
+   */
+  PI_SOCKS_PROXY_HOST(true, "socks.proxy.host", "host"),
+
+  /**
+   * Property name for the SOCKS proxy port
+   */
+  PI_SOCKS_PROXY_PORT(true, "socks.proxy.port", "1080"),
+
+  /**
+   * TCP port number for the name node
+   */
+  PI_NAME_NODE_PORT(true, "namenode.port", "50040"),
+
+  /**
+   * TCP port number for the job tracker
+   */
+  PI_JOB_TRACKER_PORT(true, "jobtracker.port", "50020"),
+
+  /**
+   * Are the Map/Reduce and the Distributed FS masters hosted on the same
+   * machine?
+   */
+  PI_COLOCATE_MASTERS(true, "masters.colocate", "yes"),
+
+  /**
+   * Property name for naming the job tracker (URI). This property is related
+   * to {@link #PI_MASTER_HOST_NAME}
+   */
+  JOB_TRACKER_URI(false, "mapreduce.jobtracker.address", "localhost:50020"),
+
+  /**
+   * Property name for naming the default file system (URI).
+   */
+  FS_DEFAULT_URI(false, "fs.default.name", "hdfs://localhost:50040/"),
+
+  /**
+   * Property name for the default socket factory:
+   */
+  SOCKET_FACTORY_DEFAULT(false, "hadoop.rpc.socket.factory.class.default",
+      "org.apache.hadoop.net.StandardSocketFactory"),
+
+  /**
+   * Property name for the SOCKS server URI.
+   */
+  SOCKS_SERVER(false, "hadoop.socks.server", "host:1080"),
+
+  ;
+
+  /**
+   * Map <property name> -> ConfProp
+   */
+  private static Map<String, ConfProp> map;
+
+  // Called from the enum constructors (which run during class
+  // initialization), so the map is fully populated before any external
+  // caller can reach getByName().
+  private static synchronized void registerProperty(String name,
+      ConfProp prop) {
+
+    if (ConfProp.map == null)
+      ConfProp.map = new HashMap<String, ConfProp>();
+
+    ConfProp.map.put(name, prop);
+  }
+
+  /**
+   * Looks up a constant by its fully-qualified property key.
+   * Returns null when the key is unknown.
+   */
+  public static ConfProp getByName(String propName) {
+    return map.get(propName);
+  }
+
+  // Fully-qualified property key (with the "eclipse.plug-in." prefix for
+  // internal properties). NOTE: distinct from the enum's built-in name().
+  public final String name;
+
+  // Default value used when the configuration has no entry for this key.
+  public final String defVal;
+
+  ConfProp(boolean internal, String name, String defVal) {
+    if (internal)
+      name = "eclipse.plug-in." + name;
+    this.name = name;
+    this.defVal = defVal;
+
+    ConfProp.registerProperty(name, this);
+  }
+
+  // Reads this property from the given configuration (null if absent).
+  String get(Configuration conf) {
+    return conf.get(name);
+  }
+
+  // Writes this property; the assert only fires when -ea is enabled.
+  void set(Configuration conf, String value) {
+    assert value != null;
+    conf.set(name, value);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/23/23fa8c300fe9a7d904148729066bac1a2ef4d560.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/23/23fa8c300fe9a7d904148729066bac1a2ef4d560.svn-base b/eclipse-plugin/.svn/pristine/23/23fa8c300fe9a7d904148729066bac1a2ef4d560.svn-base
new file mode 100644
index 0000000..2bc247a
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/23/23fa8c300fe9a7d904148729066bac1a2ef4d560.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/24/24ec47017f46b8ba7e61a229e2b9f5e2fa0116a0.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/24/24ec47017f46b8ba7e61a229e2b9f5e2fa0116a0.svn-base b/eclipse-plugin/.svn/pristine/24/24ec47017f46b8ba7e61a229e2b9f5e2fa0116a0.svn-base
new file mode 100644
index 0000000..77e2e0d
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/24/24ec47017f46b8ba7e61a229e2b9f5e2fa0116a0.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/25/252c1da0d1512f5bac6f13f5d8c510ecec743ecc.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/25/252c1da0d1512f5bac6f13f5d8c510ecec743ecc.svn-base b/eclipse-plugin/.svn/pristine/25/252c1da0d1512f5bac6f13f5d8c510ecec743ecc.svn-base
new file mode 100644
index 0000000..54c4495
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/25/252c1da0d1512f5bac6f13f5d8c510ecec743ecc.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/25/25f82e2d284e7ae0a87ea8d1d65e34f1df9aa76a.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/25/25f82e2d284e7ae0a87ea8d1d65e34f1df9aa76a.svn-base b/eclipse-plugin/.svn/pristine/25/25f82e2d284e7ae0a87ea8d1d65e34f1df9aa76a.svn-base
new file mode 100644
index 0000000..15d5718
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/25/25f82e2d284e7ae0a87ea8d1d65e34f1df9aa76a.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/27/270ef87d6973212c91581082fda28601943944e3.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/27/270ef87d6973212c91581082fda28601943944e3.svn-base b/eclipse-plugin/.svn/pristine/27/270ef87d6973212c91581082fda28601943944e3.svn-base
new file mode 100644
index 0000000..46df449
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/27/270ef87d6973212c91581082fda28601943944e3.svn-base
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.core.runtime.jobs.ISchedulingRule;
+
+/**
+ * Eclipse job scheduling rule providing mutual exclusion by identifier:
+ * two MutexRule instances conflict (and contain each other) exactly when
+ * their ids are equal, so jobs sharing an id never run concurrently.
+ */
+public class MutexRule implements ISchedulingRule {
+  // Identifier defining the mutual-exclusion group.
+  private final String id;
+
+  public MutexRule(String id) {
+    this.id = id;
+  }
+
+  // A rule contains another only when both are MutexRules with the same id.
+  public boolean contains(ISchedulingRule rule) {
+    return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
+  }
+
+  // Symmetric with contains(): same-id MutexRules may not run concurrently.
+  public boolean isConflicting(ISchedulingRule rule) {
+    return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/2c/2c392f20ac867c3e5cc7161e9b68dddbae4c186a.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/2c/2c392f20ac867c3e5cc7161e9b68dddbae4c186a.svn-base b/eclipse-plugin/.svn/pristine/2c/2c392f20ac867c3e5cc7161e9b68dddbae4c186a.svn-base
new file mode 100644
index 0000000..4a2060a
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/2c/2c392f20ac867c3e5cc7161e9b68dddbae4c186a.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/2e/2e4cebdccfda094dcea4f9bb8fb8a25dfc292441.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/2e/2e4cebdccfda094dcea4f9bb8fb8a25dfc292441.svn-base b/eclipse-plugin/.svn/pristine/2e/2e4cebdccfda094dcea4f9bb8fb8a25dfc292441.svn-base
new file mode 100644
index 0000000..94b2bc9
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/2e/2e4cebdccfda094dcea4f9bb8fb8a25dfc292441.svn-base
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * Pre-fills the new MapReduce driver class with a template.
+ * 
+ */
+
+public class NewDriverWizardPage extends NewTypeWizardPage {
+  private Button isCreateMapMethod;
+
+  private Text reducerText;
+
+  private Text mapperText;
+
+  private final boolean showContainerSelector;
+
+  public NewDriverWizardPage() {
+    this(true);
+  }
+
+  public NewDriverWizardPage(boolean showContainerSelector) {
+    super(true, "MapReduce Driver");
+
+    this.showContainerSelector = showContainerSelector;
+    setTitle("MapReduce Driver");
+    setDescription("Create a new MapReduce driver.");
+    setImageDescriptor(ImageLibrary.get("wizard.driver.new"));
+  }
+
+  public void setSelection(IStructuredSelection selection) {
+    initContainerPage(getInitialJavaElement(selection));
+    initTypePage(getInitialJavaElement(selection));
+  }
+
+  @Override
+  /**
+   * Creates the new type using the entered field values.
+   */
+  public void createType(IProgressMonitor monitor) throws CoreException,
+      InterruptedException {
+    super.createType(monitor);
+  }
+
+  @Override
+  protected void createTypeMembers(final IType newType, ImportsManager imports,
+      final IProgressMonitor monitor) throws CoreException {
+    super.createTypeMembers(newType, imports, monitor);
+    imports.addImport("org.apache.hadoop.fs.Path");
+    imports.addImport("org.apache.hadoop.io.Text");
+    imports.addImport("org.apache.hadoop.io.IntWritable");
+    imports.addImport("org.apache.hadoop.mapred.JobClient");
+    imports.addImport("org.apache.hadoop.mapred.JobConf");
+    imports.addImport("org.apache.hadoop.mapred.Reducer");
+    imports.addImport("org.apache.hadoop.mapred.Mapper");
+
+    /**
+     * TODO(jz) - move most code out of the runnable
+     */
+    getContainer().getShell().getDisplay().syncExec(new Runnable() {
+      public void run() {
+
+        String method = "public static void main(String[] args) {\n JobClient client = new JobClient();";
+        method += "JobConf conf = new JobConf("
+            + newType.getFullyQualifiedName() + ".class);\n\n";
+
+        method += "// TODO: specify output types\nconf.setOutputKeyClass(Text.class);\nconf.setOutputValueClass(IntWritable.class);\n\n";
+
+        method += "// TODO: specify input and output DIRECTORIES (not files)\nconf.setInputPath(new Path(\"src\"));\nconf.setOutputPath(new Path(\"out\"));\n\n";
+
+        if (mapperText.getText().length() > 0) {
+          method += "conf.setMapperClass(" + mapperText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a mapper\nconf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);\n\n";
+        }
+        if (reducerText.getText().length() > 0) {
+          method += "conf.setReducerClass(" + reducerText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a reducer\nconf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);\n\n";
+        }
+
+        method += "client.setConf(conf);\n";
+        method += "try {\n\tJobClient.runJob(conf);\n} catch (Exception e) {\n"
+            + "\te.printStackTrace();\n}\n";
+        method += "}\n";
+
+        try {
+          newType.createMethod(method, null, false, monitor);
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+  }
+
+  public void createControl(Composite parent) {
+    // super.createControl(parent);
+
+    initializeDialogUnits(parent);
+    Composite composite = new Composite(parent, SWT.NONE);
+    GridLayout layout = new GridLayout();
+    layout.numColumns = 4;
+    composite.setLayout(layout);
+
+    createContainerControls(composite, 4);
+
+    createPackageControls(composite, 4);
+    createSeparator(composite, 4);
+    createTypeNameControls(composite, 4);
+
+    createSuperClassControls(composite, 4);
+    createSuperInterfacesControls(composite, 4);
+    createSeparator(composite, 4);
+
+    createMapperControls(composite);
+    createReducerControls(composite);
+
+    if (!showContainerSelector) {
+      setPackageFragmentRoot(null, false);
+      setSuperClass("java.lang.Object", false);
+      setSuperInterfaces(new ArrayList(), false);
+    }
+
+    setControl(composite);
+
+    setFocus();
+    handleFieldChanged(CONTAINER);
+
+    // setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+    // setSuperInterfaces(Arrays.asList(new String[]{
+    // "org.apache.hadoop.mapred.Mapper" }), true);
+  }
+
+  @Override
+  protected void handleFieldChanged(String fieldName) {
+    super.handleFieldChanged(fieldName);
+
+    validate();
+  }
+
+  private void validate() {
+    if (showContainerSelector) {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    } else {
+      updateStatus(new IStatus[] { fTypeNameStatus, });
+    }
+  }
+
+  private void createMapperControls(Composite composite) {
+    this.mapperText = createBrowseClassControl(composite, "Ma&pper:",
+        "&Browse...", "org.apache.hadoop.mapred.Mapper", "Mapper Selection");
+  }
+
+  private void createReducerControls(Composite composite) {
+    this.reducerText = createBrowseClassControl(composite, "&Reducer:",
+        "Browse&...", "org.apache.hadoop.mapred.Reducer", "Reducer Selection");
+  }
+
+  private Text createBrowseClassControl(final Composite composite,
+      final String string, String browseButtonLabel,
+      final String baseClassName, final String dialogTitle) {
+    Label label = new Label(composite, SWT.NONE);
+    GridData data = new GridData(GridData.FILL_HORIZONTAL);
+    label.setText(string);
+    label.setLayoutData(data);
+
+    final Text text = new Text(composite, SWT.SINGLE | SWT.BORDER);
+    GridData data2 = new GridData(GridData.FILL_HORIZONTAL);
+    data2.horizontalSpan = 2;
+    text.setLayoutData(data2);
+
+    Button browse = new Button(composite, SWT.NONE);
+    browse.setText(browseButtonLabel);
+    GridData data3 = new GridData(GridData.FILL_HORIZONTAL);
+    browse.setLayoutData(data3);
+    browse.addListener(SWT.Selection, new Listener() {
+      public void handleEvent(Event event) {
+        IType baseType;
+        try {
+          baseType = getPackageFragmentRoot().getJavaProject().findType(
+              baseClassName);
+
+          // edit this to limit the scope
+          SelectionDialog dialog = JavaUI.createTypeDialog(
+              composite.getShell(), new ProgressMonitorDialog(composite
+                  .getShell()), SearchEngine.createHierarchyScope(baseType),
+              IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+
+          dialog.setMessage("&Choose a type:");
+          dialog.setBlockOnOpen(true);
+          dialog.setTitle(dialogTitle);
+          dialog.open();
+
+          if ((dialog.getReturnCode() == Window.OK)
+              && (dialog.getResult().length > 0)) {
+            IType type = (IType) dialog.getResult()[0];
+            text.setText(type.getFullyQualifiedName());
+          }
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+
+    if (!showContainerSelector) {
+      label.setEnabled(false);
+      text.setEnabled(false);
+      browse.setEnabled(false);
+    }
+
+    return text;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/2e/2eb4af878981c16aee202fbd812eb0585d615d24.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/2e/2eb4af878981c16aee202fbd812eb0585d615d24.svn-base b/eclipse-plugin/.svn/pristine/2e/2eb4af878981c16aee202fbd812eb0585d615d24.svn-base
new file mode 100644
index 0000000..7dc72a7
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/2e/2eb4af878981c16aee202fbd812eb0585d615d24.svn-base
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Local representation of a folder in the DFS.
+ * 
+ * The constructor creates an empty representation of the folder and spawn a
+ * thread that will fill
+ */
+public class DFSFolder extends DFSPath implements DFSContent {
+
+  static Logger log = Logger.getLogger(DFSFolder.class.getName());
+
+  private DFSContent[] children;
+
+  protected DFSFolder(DFSContentProvider provider, HadoopServer location)
+      throws IOException {
+
+    super(provider, location);
+  }
+
+  private DFSFolder(DFSPath parent, Path path) {
+    super(parent, path);
+  }
+
+  protected void loadDFSFolderChildren() throws IOException {
+    List<DFSPath> list = new ArrayList<DFSPath>();
+
+    for (FileStatus status : getDFS().listStatus(this.getPath())) {
+      if (status.isDir()) {
+        list.add(new DFSFolder(this, status.getPath()));
+      } else {
+        list.add(new DFSFile(this, status.getPath()));
+      }
+    }
+
+    this.children = list.toArray(new DFSContent[list.size()]);
+  }
+
+  /**
+   * Upload the given file or directory into this DfsFolder
+   * 
+   * @param file
+   * @throws IOException
+   */
+  public void upload(IProgressMonitor monitor, final File file)
+      throws IOException {
+
+    if (file.isDirectory()) {
+      Path filePath = new Path(this.path, file.getName());
+      getDFS().mkdirs(filePath);
+      DFSFolder newFolder = new DFSFolder(this, filePath);
+      monitor.worked(1);
+      for (File child : file.listFiles()) {
+        if (monitor.isCanceled())
+          return;
+        newFolder.upload(monitor, child);
+      }
+
+    } else if (file.isFile()) {
+      Path filePath = new Path(this.path, file.getName());
+      DFSFile newFile = new DFSFile(this, filePath, file, monitor);
+
+    } else {
+      // XXX don't know what the file is?
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+    if (!dir.exists())
+      dir.mkdirs();
+
+    if (!dir.isDirectory()) {
+      MessageDialog.openError(null, "Download to local file system",
+          "Invalid directory location: \"" + dir + "\"");
+      return;
+    }
+
+    File dfsPath = new File(this.getPath().toString());
+    File destination = new File(dir, dfsPath.getName());
+
+    if (!destination.exists()) {
+      if (!destination.mkdir()) {
+        MessageDialog.openError(null, "Download to local directory",
+            "Unable to create directory " + destination.getAbsolutePath());
+        return;
+      }
+    }
+
+    // Download all DfsPath children
+    for (Object childObj : getChildren()) {
+      if (childObj instanceof DFSPath) {
+        ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
+        monitor.worked(1);
+      }
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int computeDownloadWork() {
+    int work = 1;
+    for (DFSContent child : getChildren()) {
+      if (child instanceof DFSPath)
+        work += ((DFSPath) child).computeDownloadWork();
+    }
+
+    return work;
+  }
+
+  /**
+   * Create a new sub directory into this directory
+   * 
+   * @param folderName
+   */
+  public void mkdir(String folderName) {
+    try {
+      getDFS().mkdirs(new Path(this.path, folderName));
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+    }
+    doRefresh();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    if (this.children == null)
+      return true;
+    else
+      return (this.children.length > 0);
+  }
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (children == null) {
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            loadDFSFolderChildren();
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            children =
+                new DFSContent[] { new DFSMessage("Error: "
+                    + ioe.getLocalizedMessage()) };
+            return Status.CANCEL_STATUS;
+
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSFolder.this);
+          }
+        }
+      }.schedule();
+
+      return new DFSContent[] { new DFSMessage("Listing folder content...") };
+    }
+    return this.children;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    this.children = null;
+    this.doRefresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return String.format("%s (%s)", super.toString(),
+        this.getChildren().length);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/33/33b39f782c0b1ef81be143a52e9a99ebc5f2bd1b.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/33/33b39f782c0b1ef81be143a52e9a99ebc5f2bd1b.svn-base b/eclipse-plugin/.svn/pristine/33/33b39f782c0b1ef81be143a52e9a99ebc5f2bd1b.svn-base
new file mode 100644
index 0000000..2e842db
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/33/33b39f782c0b1ef81be143a52e9a99ebc5f2bd1b.svn-base
@@ -0,0 +1,58 @@
+<?xml version="1.0" ?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<ivy-module version="1.0">
+  <info organisation="org.apache.hadoop" module="${ant.project.name}">
+    <license name="Apache 2.0"/>
+    <ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
+    <description>
+        Apache Hadoop
+    </description>
+  </info>
+  <configurations defaultconfmapping="default">
+    <!--these match the Maven configurations-->
+    <conf name="default" extends="master,runtime"/>
+    <conf name="master" description="contains the artifact but no dependencies"/>
+    <conf name="runtime" description="runtime but not the artifact" />
+
+    <conf name="common" visibility="private" 
+      extends="runtime"
+      description="artifacts needed for compile/test the application"/>
+    <conf name="test" visibility="private" extends="runtime"/>
+  </configurations>
+
+  <publications>
+    <!--get the artifact from our module name-->
+    <artifact conf="master"/>
+  </publications>
+  <dependencies>
+   <dependency org="org.apache.hadoop" name="hadoop-annotations" rev="${hadoop-common.version}" conf="common->default"/>
+    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${hadoop-common.version}" conf="common->default"/>
+    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${hadoop-hdfs.version}" conf="common->default"/>
+    <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}" conf="common->default"/>
+    <dependency org="log4j" name="log4j" rev="${log4j.version}" conf="common->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${yarn.version}" conf="common->default"/>
+
+   <!-- Exclusions for transitive dependencies pulled in by log4j -->
+   <exclude org="com.sun.jdmk"/>
+   <exclude org="com.sun.jmx"/>
+   <exclude org="javax.jms"/> 
+   <exclude org="javax.mail"/> 
+
+  </dependencies>
+</ivy-module>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/35/3512197cdccf9084c21ae7bb87e20a77e4939f75.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/35/3512197cdccf9084c21ae7bb87e20a77e4939f75.svn-base b/eclipse-plugin/.svn/pristine/35/3512197cdccf9084c21ae7bb87e20a77e4939f75.svn-base
new file mode 100644
index 0000000..d1a105e
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/35/3512197cdccf9084c21ae7bb87e20a77e4939f75.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/36/3672dc2fe9de87b8667dfe18e887c2d7f3619aea.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/36/3672dc2fe9de87b8667dfe18e887c2d7f3619aea.svn-base b/eclipse-plugin/.svn/pristine/36/3672dc2fe9de87b8667dfe18e887c2d7f3619aea.svn-base
new file mode 100644
index 0000000..8ec09d4
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/36/3672dc2fe9de87b8667dfe18e887c2d7f3619aea.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/36/36db84fee18c4007241dc7e43d372109a45095b0.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/36/36db84fee18c4007241dc7e43d372109a45095b0.svn-base b/eclipse-plugin/.svn/pristine/36/36db84fee18c4007241dc7e43d372109a45095b0.svn-base
new file mode 100644
index 0000000..82ef714
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/36/36db84fee18c4007241dc7e43d372109a45095b0.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/3b/3b1662854682c001ef66301eb57de622cb7b9ae3.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/3b/3b1662854682c001ef66301eb57de622cb7b9ae3.svn-base b/eclipse-plugin/.svn/pristine/3b/3b1662854682c001ef66301eb57de622cb7b9ae3.svn-base
new file mode 100644
index 0000000..cf58b9c
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/3b/3b1662854682c001ef66301eb57de622cb7b9ae3.svn-base
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.IEditorInput;
+import org.eclipse.ui.IEditorPart;
+import org.eclipse.ui.IEditorSite;
+import org.eclipse.ui.IPropertyListener;
+import org.eclipse.ui.IWorkbenchPartSite;
+import org.eclipse.ui.PartInitException;
+
+public class HadoopPathPage implements IEditorPart {
+
+  public IEditorInput getEditorInput() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public IEditorSite getEditorSite() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void init(IEditorSite site, IEditorInput input)
+      throws PartInitException {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void addPropertyListener(IPropertyListener listener) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void createPartControl(Composite parent) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void dispose() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public IWorkbenchPartSite getSite() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public String getTitle() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public Image getTitleImage() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public String getTitleToolTip() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void removePropertyListener(IPropertyListener listener) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void setFocus() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public Object getAdapter(Class adapter) {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void doSave(IProgressMonitor monitor) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void doSaveAs() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public boolean isDirty() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  public boolean isSaveAsAllowed() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  public boolean isSaveOnCloseNeeded() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/3b/3b66a340b5fe5c183bddfbf68117272372b038ab.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/3b/3b66a340b5fe5c183bddfbf68117272372b038ab.svn-base b/eclipse-plugin/.svn/pristine/3b/3b66a340b5fe5c183bddfbf68117272372b038ab.svn-base
new file mode 100644
index 0000000..03be2b7
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/3b/3b66a340b5fe5c183bddfbf68117272372b038ab.svn-base
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+
+<cheatsheet title="Set default Hadoop path tutorial">
+	<intro>
+		<description>
+			This tutorial informs you how to set the default Hadoop
+			directory for the plugin.
+		</description>
+	</intro>
+	<item title="Create MapReduce Cluster" skip="true">
+		<description>
+			Define a MapReduce cluster [if you have not done so already]
+			by opening the MapReduce Servers view and clicking on the
+			blue elephant in the upper right.
+
+			Use the following embedded command to create a new Hadoop Server:
+		</description>
+
+		<action pluginId="com.ibm.hipods.mapreduce"
+			class="org.apache.hadoop.eclipse.actions.NewServerAction" />
+	</item>
+	<item title="Open and Explore DFS Tree">
+
+		<description>
+			Project Explorer view shows an elephant icon for each defined
+			server.  Opening a server entry will open a connection to
+			the root of that server's DFS tree.  You can then explore the
+			DFS tree.
+		</description>
+
+	</item>
+</cheatsheet>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/3c/3c4b8f8656d7649546c130c4f1aec3629a3b5f25.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/3c/3c4b8f8656d7649546c130c4f1aec3629a3b5f25.svn-base b/eclipse-plugin/.svn/pristine/3c/3c4b8f8656d7649546c130c4f1aec3629a3b5f25.svn-base
new file mode 100644
index 0000000..0240ed2
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/3c/3c4b8f8656d7649546c130c4f1aec3629a3b5f25.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/42/42b4eb061b4e33588b0d4067a735439f6916ea01.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/42/42b4eb061b4e33588b0d4067a735439f6916ea01.svn-base b/eclipse-plugin/.svn/pristine/42/42b4eb061b4e33588b0d4067a735439f6916ea01.svn-base
new file mode 100644
index 0000000..31c8fb3
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/42/42b4eb061b4e33588b0d4067a735439f6916ea01.svn-base
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.IOException;
+
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+
+/**
+ * DFS Content representation of a HDFS location
+ */
+public class DFSLocation implements DFSContent {
+
+  private final DFSContentProvider provider;
+
+  private final HadoopServer location;
+
+  private DFSContent rootFolder = null;
+
+  DFSLocation(DFSContentProvider provider, HadoopServer server) {
+    this.provider = provider;
+    this.location = server;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.location.getLocationName();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (this.rootFolder == null) {
+      /*
+       * DfsFolder constructor might block as it contacts the NameNode: work
+       * asynchronously here or this will potentially freeze the UI
+       */
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            rootFolder = new DFSFolder(provider, location);
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            rootFolder =
+                new DFSMessage("Error: " + ioe.getLocalizedMessage());
+            return Status.CANCEL_STATUS;
+
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSLocation.this);
+          }
+        }
+      }.schedule();
+
+      return new DFSContent[] { new DFSMessage("Connecting to DFS "
+          + toString()) };
+    }
+    return new DFSContent[] { this.rootFolder };
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return true;
+  }
+  
+  /* @inheritDoc */
+  public void refresh() {
+    this.rootFolder = null;
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+  
+  /**
+   * Refresh the location using a new connection
+   */
+  public void reconnect() {
+    this.refresh();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/47/4747af06ad3f33383920c096c7ab4d26bdbeeb08.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/47/4747af06ad3f33383920c096c7ab4d26bdbeeb08.svn-base b/eclipse-plugin/.svn/pristine/47/4747af06ad3f33383920c096c7ab4d26bdbeeb08.svn-base
new file mode 100644
index 0000000..b12dd8a
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/47/4747af06ad3f33383920c096c7ab4d26bdbeeb08.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/4c/4cd5d68379855e78f0475db9c66595a0fc9b64fd.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/4c/4cd5d68379855e78f0475db9c66595a0fc9b64fd.svn-base b/eclipse-plugin/.svn/pristine/4c/4cd5d68379855e78f0475db9c66595a0fc9b64fd.svn-base
new file mode 100644
index 0000000..c24253e
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/4c/4cd5d68379855e78f0475db9c66595a0fc9b64fd.svn-base
@@ -0,0 +1,349 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.RunningJob;
+
+/**
+ * Representation of a Map/Reduce running job on a given location
+ */
+
+public class HadoopJob {
+
+  /**
+   * Enum representation of a Job state
+   */
+  public enum JobState {
+    PREPARE(JobStatus.PREP), RUNNING(JobStatus.RUNNING), FAILED(
+        JobStatus.FAILED), SUCCEEDED(JobStatus.SUCCEEDED);
+
+    // Numeric run-state constant from JobStatus backing this enum value
+    final int state;
+
+    JobState(int state) {
+      this.state = state;
+    }
+
+    /**
+     * Map a {@link JobStatus} run-state constant to the matching enum
+     * value, or {@code null} if the constant is not one of the four
+     * states handled here.
+     */
+    static JobState ofInt(int state) {
+      if (state == JobStatus.PREP) {
+        return PREPARE;
+      }
+      else if (state == JobStatus.RUNNING) {
+        return RUNNING;
+      }
+      else if (state == JobStatus.FAILED) {
+        return FAILED;
+      }
+      else if (state == JobStatus.SUCCEEDED) {
+        return SUCCEEDED;
+      }
+      else {
+        return null;
+      }
+    }
+  }
+
+  /**
+   * Location this Job runs on
+   */
+  private final HadoopServer location;
+
+  /**
+   * Unique identifier of this Job
+   */
+  final JobID jobId;
+
+  /**
+   * Status representation of a running job. This actually contains a
+   * reference to a JobClient. Its methods might block.
+   */
+  RunningJob running;
+
+  /**
+   * Last polled status
+   * 
+   * @deprecated should apparently not be used
+   */
+  JobStatus status;
+
+  /**
+   * Last polled counters
+   */
+  Counters counters;
+
+  /**
+   * Job Configuration
+   */
+  JobConf jobConf = null;
+
+  // Flags refreshed from the RunningJob handle on each update()
+  boolean completed = false;
+
+  boolean successful = false;
+
+  // Set only after a successful killJob() call; never read back here
+  boolean killed = false;
+
+  // Task counts read from the job's configuration file (0 if the
+  // JobConf could not be loaded, see loadJobFile())
+  int totalMaps;
+
+  int totalReduces;
+
+  // Estimated from progress ratios in update(), not exact task counts
+  int completedMaps;
+
+  int completedReduces;
+
+  // Progress fractions in [0, 1] as reported by the RunningJob handle
+  float mapProgress;
+
+  float reduceProgress;
+
+  /**
+   * Constructor for a Hadoop job representation
+   * 
+   * @param location server/location the job runs on
+   * @param id unique identifier of the job
+   * @param running live handle to the running job (may block on use)
+   * @param status initial status snapshot used for the first update
+   */
+  public HadoopJob(HadoopServer location, JobID id, RunningJob running,
+      JobStatus status) {
+
+    this.location = location;
+    this.jobId = id;
+    this.running = running;
+
+    loadJobFile();
+
+    update(status);
+  }
+
+  /**
+   * Try to locate and load the JobConf file for this job so to get more
+   * details on the job (number of maps and of reduces)
+   */
+  private void loadJobFile() {
+    try {
+      String jobFile = getJobFile();
+      FileSystem fs = location.getDFS();
+      // Copy the job.xml from the DFS to a local temp file so JobConf
+      // can parse it.
+      // NOTE(review): the temp file is never deleted (no deleteOnExit),
+      // so repeated polling leaks temp files — confirm and clean up.
+      File tmp = File.createTempFile(getJobID().toString(), ".xml");
+      if (FileUtil.copy(fs, new Path(jobFile), tmp, false, location
+          .getConfiguration())) {
+        this.jobConf = new JobConf(tmp.toString());
+
+        this.totalMaps = jobConf.getNumMapTasks();
+        this.totalReduces = jobConf.getNumReduceTasks();
+      }
+
+    } catch (IOException ioe) {
+      // Best-effort: job details simply stay unknown if the copy fails
+      ioe.printStackTrace();
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int hashCode() {
+    // Identity is (jobId, location), consistent with equals() below
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((jobId == null) ? 0 : jobId.hashCode());
+    result = prime * result + ((location == null) ? 0 : location.hashCode());
+    return result;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (!(obj instanceof HadoopJob))
+      return false;
+    final HadoopJob other = (HadoopJob) obj;
+    if (jobId == null) {
+      if (other.jobId != null)
+        return false;
+    } else if (!jobId.equals(other.jobId))
+      return false;
+    if (location == null) {
+      if (other.location != null)
+        return false;
+    } else if (!location.equals(other.location))
+      return false;
+    return true;
+  }
+
+  /**
+   * Get the running status of the Job (@see {@link JobStatus}).
+   * 
+   * @return the job state derived from the cached completed/successful
+   *         flags; PREPARE is never returned by this implementation
+   */
+  public JobState getState() {
+    // Derived from the flags cached by update(), not from this.status
+    if (this.completed) {
+      if (this.successful) {
+        return JobState.SUCCEEDED;
+      } else {
+        return JobState.FAILED;
+      }
+    } else {
+      return JobState.RUNNING;
+    }
+    // return JobState.ofInt(this.status.getRunState());
+  }
+
+  /**
+   * @return the unique identifier of this job
+   */
+  public JobID getJobID() {
+    return this.jobId;
+  }
+
+  /**
+   * @return the location (server) this job runs on
+   */
+  public HadoopServer getLocation() {
+    return this.location;
+  }
+
+  /**
+   * @return whether the job has completed (as of the last update())
+   */
+  public boolean isCompleted() {
+    return this.completed;
+  }
+
+  /**
+   * @return the job name as reported by the running-job handle
+   */
+  public String getJobName() {
+    return this.running.getJobName();
+  }
+
+  /**
+   * @return the path of the job configuration file on the DFS
+   */
+  public String getJobFile() {
+    return this.running.getJobFile();
+  }
+
+  /**
+   * Return the tracking URL for this Job.
+   * 
+   * @return string representation of the tracking URL for this Job
+   */
+  public String getTrackingURL() {
+    return this.running.getTrackingURL();
+  }
+
+  /**
+   * Returns a string representation of this job status
+   * 
+   * @return string representation of this job status
+   */
+  public String getStatus() {
+
+    StringBuffer s = new StringBuffer();
+
+    s.append("Maps : " + completedMaps + "/" + totalMaps);
+    s.append(" (" + mapProgress + ")");
+    s.append("  Reduces : " + completedReduces + "/" + totalReduces);
+    s.append(" (" + reduceProgress + ")");
+
+    return s.toString();
+  }
+
+  /**
+   * Update this job status according to the given JobStatus
+   * 
+   * @param status latest status snapshot (stored but otherwise unused;
+   *          the live values are polled from the RunningJob handle)
+   */
+  void update(JobStatus status) {
+    this.status = status;
+    try {
+      // Each of these calls may block (they go through the JobClient)
+      this.counters = running.getCounters();
+      this.completed = running.isComplete();
+      this.successful = running.isSuccessful();
+      this.mapProgress = running.mapProgress();
+      this.reduceProgress = running.reduceProgress();
+      // running.getTaskCompletionEvents(fromEvent);
+
+    } catch (IOException ioe) {
+      // Keep the previously cached values if polling fails
+      ioe.printStackTrace();
+    }
+
+    // Completed task counts are estimates derived from progress ratios
+    this.completedMaps = (int) (this.totalMaps * this.mapProgress);
+    this.completedReduces = (int) (this.totalReduces * this.reduceProgress);
+  }
+
+  /**
+   * Print this job counters (for debugging purpose)
+   */
+  void printCounters() {
+    // NOTE(review): the format string "New Job:\n" has no %s, so the
+    // counters argument is silently ignored — confirm intent.
+    System.out.printf("New Job:\n", counters);
+    for (String groupName : counters.getGroupNames()) {
+      Counters.Group group = counters.getGroup(groupName);
+      System.out.printf("\t%s[%s]\n", groupName, group.getDisplayName());
+
+      for (Counters.Counter counter : group) {
+        System.out.printf("\t\t%s: %s\n", counter.getDisplayName(),
+                                          counter.getCounter());
+      }
+    }
+    System.out.printf("\n");
+  }
+
+  /**
+   * Kill this job
+   */
+  public void kill() {
+    try {
+      this.running.killJob();
+      // Only marked killed if killJob() did not throw
+      this.killed = true;
+
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Print this job status (for debugging purpose)
+   */
+  public void display() {
+    System.out.printf("Job id=%s, name=%s\n", getJobID(), getJobName());
+    // NOTE(review): this line prints getJobID() but is labelled
+    // "Configuration file" — getJobFile() looks intended; confirm.
+    System.out.printf("Configuration file: %s\n", getJobID());
+    System.out.printf("Tracking URL: %s\n", getTrackingURL());
+
+    System.out.printf("Completion: map: %f reduce %f\n",
+        100.0 * this.mapProgress, 100.0 * this.reduceProgress);
+
+    System.out.println("Job total maps = " + totalMaps);
+    System.out.println("Job completed maps = " + completedMaps);
+    System.out.println("Map percentage complete = " + mapProgress);
+    System.out.println("Job total reduces = " + totalReduces);
+    System.out.println("Job completed reduces = " + completedReduces);
+    System.out.println("Reduce percentage complete = " + reduceProgress);
+    System.out.flush();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/4d/4d80b6f8c25cab47ff14d7e3b60c07972bbdf0c0.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/4d/4d80b6f8c25cab47ff14d7e3b60c07972bbdf0c0.svn-base b/eclipse-plugin/.svn/pristine/4d/4d80b6f8c25cab47ff14d7e3b60c07972bbdf0c0.svn-base
new file mode 100644
index 0000000..66db5d2
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/4d/4d80b6f8c25cab47ff14d7e3b60c07972bbdf0c0.svn-base
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.debug.ui.AbstractLaunchConfigurationTab;
+import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup;
+import org.eclipse.debug.ui.CommonTab;
+import org.eclipse.debug.ui.ILaunchConfigurationDialog;
+import org.eclipse.debug.ui.ILaunchConfigurationTab;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.dom.AST;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * 
+ * Handler for Local MapReduce job launches
+ * 
+ * TODO(jz) this may not be needed as we almost always deploy to a remote server
+ * and not locally, where we do do it locally we may just be able to exec
+ * scripts without going to java
+ * 
+ */
+public class LocalMapReduceLaunchTabGroup extends
+    AbstractLaunchConfigurationTabGroup {
+
+  public LocalMapReduceLaunchTabGroup() {
+    // TODO Auto-generated constructor stub
+  }
+
+  /**
+   * Build the tab list for the launch dialog: the Hadoop-specific
+   * Mapper/Reducer/Combiner tab followed by the standard JDT tabs.
+   */
+  public void createTabs(ILaunchConfigurationDialog dialog, String mode) {
+    setTabs(new ILaunchConfigurationTab[] { new MapReduceLaunchTab(),
+        new JavaArgumentsTab(), new JavaJRETab(), new JavaClasspathTab(),
+        new CommonTab() });
+  }
+
+  /**
+   * Launch-configuration tab that lets the user pick the Mapper,
+   * Reducer and Combiner classes. The selections are stored as
+   * fully-qualified class names under the
+   * "org.apache.hadoop.eclipse.launch.*" configuration attributes.
+   */
+  public static class MapReduceLaunchTab extends AbstractLaunchConfigurationTab {
+    private Text combinerClass;
+
+    private Text reducerClass;
+
+    private Text mapperClass;
+
+    @Override
+    public boolean canSave() {
+      return true;
+    }
+
+    @Override
+    public boolean isValid(ILaunchConfiguration launchConfig) {
+      // todo: only if all classes are of proper types
+      return true;
+    }
+
+    /**
+     * Create the tab UI: a 3-column grid with one label/text/browse
+     * row per class (Mapper, Reducer, Combiner).
+     */
+    public void createControl(final Composite parent) {
+      Composite panel = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout(3, false);
+      panel.setLayout(layout);
+
+      Label mapperLabel = new Label(panel, SWT.NONE);
+      mapperLabel.setText("Mapper");
+      mapperClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, mapperClass);
+
+      Label reducerLabel = new Label(panel, SWT.NONE);
+      reducerLabel.setText("Reducer");
+      reducerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, reducerClass);
+
+      Label combinerLabel = new Label(panel, SWT.NONE);
+      combinerLabel.setText("Combiner");
+      combinerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, combinerClass);
+
+      panel.pack();
+      setControl(panel);
+    }
+
+    /**
+     * Attach a "Browse..." button to the given text field that opens a
+     * workspace type-selection dialog and writes the chosen type's
+     * fully-qualified name into the field.
+     */
+    private void createRow(final Composite parent, Composite panel,
+        final Text text) {
+      text.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+      Button button = new Button(panel, SWT.BORDER);
+      button.setText("Browse...");
+      button.addListener(SWT.Selection, new Listener() {
+        public void handleEvent(Event arg0) {
+          try {
+            // NOTE(review): 'ast' is created but never used — looks
+            // like dead code; confirm it can be removed.
+            AST ast = AST.newAST(3);
+
+            SelectionDialog dialog = JavaUI.createTypeDialog(parent.getShell(),
+                new ProgressMonitorDialog(parent.getShell()), SearchEngine
+                    .createWorkspaceScope(),
+                IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+            // NOTE(review): message/title always say "Mapper" even when
+            // this row is the Reducer or Combiner — confirm intent.
+            dialog.setMessage("Select Mapper type (implementing )");
+            dialog.setBlockOnOpen(true);
+            dialog.setTitle("Select Mapper Type");
+            dialog.open();
+
+            if ((dialog.getReturnCode() == Window.OK)
+                && (dialog.getResult().length > 0)) {
+              IType type = (IType) dialog.getResult()[0];
+              text.setText(type.getFullyQualifiedName());
+              setDirty(true);
+            }
+          } catch (JavaModelException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+          }
+        }
+      });
+    }
+
+    public String getName() {
+      return "Hadoop";
+    }
+
+    /**
+     * Populate the three text fields from the launch configuration,
+     * defaulting to empty strings when the attributes are unset.
+     */
+    public void initializeFrom(ILaunchConfiguration configuration) {
+      try {
+        mapperClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.mapper", ""));
+        reducerClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.reducer", ""));
+        combinerClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.combiner", ""));
+      } catch (CoreException e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+        setErrorMessage(e.getMessage());
+      }
+    }
+
+    /**
+     * Write the current field values back into the launch
+     * configuration working copy.
+     */
+    public void performApply(ILaunchConfigurationWorkingCopy configuration) {
+      configuration.setAttribute("org.apache.hadoop.eclipse.launch.mapper",
+          mapperClass.getText());
+      configuration.setAttribute(
+          "org.apache.hadoop.eclipse.launch.reducer", reducerClass
+              .getText());
+      configuration.setAttribute(
+          "org.apache.hadoop.eclipse.launch.combiner", combinerClass
+              .getText());
+    }
+
+    public void setDefaults(ILaunchConfigurationWorkingCopy configuration) {
+      // No defaults: the attributes stay unset until the user picks types
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/53/5383abc8f2374c58cb370403a28a0e8593bd4156.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/53/5383abc8f2374c58cb370403a28a0e8593bd4156.svn-base b/eclipse-plugin/.svn/pristine/53/5383abc8f2374c58cb370403a28a0e8593bd4156.svn-base
new file mode 100644
index 0000000..0a48eaa
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/53/5383abc8f2374c58cb370403a28a0e8593bd4156.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/55/552c3e548b1649e3cec6d7f2e229b56937fad8dd.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/55/552c3e548b1649e3cec6d7f2e229b56937fad8dd.svn-base b/eclipse-plugin/.svn/pristine/55/552c3e548b1649e3cec6d7f2e229b56937fad8dd.svn-base
new file mode 100644
index 0000000..1678e0d
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/55/552c3e548b1649e3cec6d7f2e229b56937fad8dd.svn-base
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.servers.RunOnHadoopWizard;
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.resources.IResource;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.ILaunchConfigurationType;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.core.IJavaProject;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaCore;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaApplicationLaunchShortcut;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
+import org.eclipse.jface.wizard.IWizard;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Shell;
+
+/**
+ * Add a shortcut "Run on Hadoop" to the Run menu
+ */
+
+public class HadoopApplicationLaunchShortcut extends
+    JavaApplicationLaunchShortcut {
+
+  static Logger log =
+      Logger.getLogger(HadoopApplicationLaunchShortcut.class.getName());
+
+  // private ActionDelegate delegate = new RunOnHadoopActionDelegate();
+
+  public HadoopApplicationLaunchShortcut() {
+  }
+
+  /**
+   * Find or create a launch configuration for the given main type,
+   * replace its default classpath with the project's run-time
+   * classpath, then run the {@link RunOnHadoopWizard} so the user can
+   * pick a Hadoop location target.
+   *
+   * @return the tuned working copy, or {@code null} if classpath
+   *         setup fails, the resource is not a file, the wizard is
+   *         cancelled, or saving the configuration fails
+   */
+  /* @inheritDoc */
+  @Override
+  protected ILaunchConfiguration findLaunchConfiguration(IType type,
+      ILaunchConfigurationType configType) {
+
+    // Find an existing or create a launch configuration (Standard way)
+    ILaunchConfiguration iConf =
+        super.findLaunchConfiguration(type, configType);
+    if (iConf == null) iConf = super.createConfiguration(type);
+    ILaunchConfigurationWorkingCopy iConfWC;
+    try {
+      /*
+       * Tune the default launch configuration: setup run-time classpath
+       * manually
+       */
+      iConfWC = iConf.getWorkingCopy();
+
+      // Disable the default classpath so our explicit entry is used
+      iConfWC.setAttribute(
+          IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
+
+      List<String> classPath = new ArrayList<String>();
+      IResource resource = type.getResource();
+      IJavaProject project =
+          (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
+      IRuntimeClasspathEntry cpEntry =
+          JavaRuntime.newDefaultProjectClasspathEntry(project);
+      // Classpath entries are persisted as XML mementos
+      classPath.add(0, cpEntry.getMemento());
+
+      iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+          classPath);
+
+    } catch (CoreException e) {
+      e.printStackTrace();
+      // FIXME Error dialog
+      return null;
+    }
+
+    /*
+     * Update the selected configuration with a specific Hadoop location
+     * target
+     */
+    IResource resource = type.getResource();
+    if (!(resource instanceof IFile))
+      return null;
+    RunOnHadoopWizard wizard =
+        new RunOnHadoopWizard((IFile) resource, iConfWC);
+    WizardDialog dialog =
+        new WizardDialog(Display.getDefault().getActiveShell(), wizard);
+
+    dialog.create();
+    // Block until the user finishes or cancels the wizard
+    dialog.setBlockOnOpen(true);
+    if (dialog.open() != WizardDialog.OK)
+      return null;
+
+    try {
+      
+      // Only save if some configuration is different.
+      if(!iConfWC.contentsEqual(iConf))
+        iConfWC.doSave();
+
+    } catch (CoreException e) {
+      e.printStackTrace();
+      // FIXME Error dialog
+      return null;
+    }
+
+    // Return the tuned working copy (not the saved original)
+    return iConfWC;
+  }
+
+  /**
+   * Was used to run the RunOnHadoopWizard inside and provide it a
+   * ProgressMonitor
+   */
+  static class Dialog extends WizardDialog {
+    public Dialog(Shell parentShell, IWizard newWizard) {
+      super(parentShell, newWizard);
+    }
+
+    @Override
+    public void create() {
+      super.create();
+
+      // Hand the dialog's progress monitor to the wizard after the
+      // controls exist (getProgressMonitor() requires a created dialog)
+      ((RunOnHadoopWizard) getWizard())
+          .setProgressMonitor(getProgressMonitor());
+    }
+  }
+}