Posted to commits@hdt.apache.org by ad...@apache.org on 2013/01/08 23:26:26 UTC

[8/11] Import of source from Apache Hadoop MapReduce contrib; this is the plugin as it existed in the Hadoop 0.23.4 release.

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/56/569f5c08030a7c2b1b424af1f23722f21afbfca9.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/56/569f5c08030a7c2b1b424af1f23722f21afbfca9.svn-base b/eclipse-plugin/.svn/pristine/56/569f5c08030a7c2b1b424af1f23722f21afbfca9.svn-base
new file mode 100644
index 0000000..65436ac
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/56/569f5c08030a7c2b1b424af1f23722f21afbfca9.svn-base
@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.actions.DFSActionImpl;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.action.IMenuManager;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.IActionBars;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.actions.ActionFactory;
+import org.eclipse.ui.navigator.CommonActionProvider;
+import org.eclipse.ui.navigator.ICommonActionConstants;
+import org.eclipse.ui.navigator.ICommonActionExtensionSite;
+import org.eclipse.ui.navigator.ICommonMenuConstants;
+
+/**
+ * Allows the user to delete and refresh items in the DFS tree
+ */
+
+public class ActionProvider extends CommonActionProvider {
+
+  private static ICommonActionExtensionSite site;
+
+  public ActionProvider() {
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void init(ICommonActionExtensionSite site) {
+    if (ActionProvider.site != null) {
+      System.err.printf("%s: Multiple init()\n", this.getClass()
+          .getCanonicalName());
+      return;
+    }
+    super.init(site);
+    ActionProvider.site = site;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void fillActionBars(IActionBars actionBars) {
+    actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
+        new DFSAction(DFSActions.DELETE));
+    actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
+        new DFSAction(DFSActions.REFRESH));
+
+    if (site == null)
+      return;
+
+    if ((site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
+        && (((IStructuredSelection) site.getStructuredViewer()
+            .getSelection()).size() == 1)
+        && (((IStructuredSelection) site.getStructuredViewer()
+            .getSelection()).getFirstElement() instanceof DFSFile)) {
+
+      actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
+          new DFSAction(DFSActions.OPEN));
+    }
+
+    actionBars.updateActionBars();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void fillContextMenu(IMenuManager menu) {
+    /*
+     * Actions on multiple selections
+     */
+    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DFSAction(
+        DFSActions.DELETE));
+
+    menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+        DFSActions.REFRESH));
+
+    menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+        DFSActions.DOWNLOAD));
+
+    if (site == null)
+      return;
+
+    ISelection isel = site.getStructuredViewer().getSelection();
+    if (!(isel instanceof IStructuredSelection))
+      return;
+
+    /*
+     * Actions on single selections only
+     */
+
+    IStructuredSelection issel = (IStructuredSelection) isel;
+    if (issel.size() != 1)
+      return;
+    Object element = issel.getFirstElement();
+
+    if (element instanceof DFSFile) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.OPEN));
+
+    } else if (element instanceof DFSFolder) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.MKDIR));
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.UPLOAD_FILES));
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.UPLOAD_DIR));
+
+    } else if (element instanceof DFSLocation) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.RECONNECT));
+
+    } else if (element instanceof DFSLocationsRoot) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.DISCONNECT));
+    }
+
+  }
+
+  /**
+   * Representation of an action on a DFS entry in the browser
+   */
+  public static class DFSAction extends Action {
+
+    private final String id;
+
+    private final String title;
+
+    private DFSActions action;
+
+    public DFSAction(String id, String title) {
+      this.id = id;
+      this.title = title;
+    }
+
+    public DFSAction(DFSActions action) {
+      this.id = action.id;
+      this.title = action.title;
+    }
+
+    /* @inheritDoc */
+    @Override
+    public String getText() {
+      return this.title;
+    }
+
+    /* @inheritDoc */
+    @Override
+    public ImageDescriptor getImageDescriptor() {
+      return ImageLibrary.get(getActionDefinitionId());
+    }
+
+    /* @inheritDoc */
+    @Override
+    public String getActionDefinitionId() {
+      return id;
+    }
+
+    /* @inheritDoc */
+    @Override
+    public void run() {
+      DFSActionImpl action = new DFSActionImpl();
+      action.setActivePart(this, PlatformUI.getWorkbench()
+          .getActiveWorkbenchWindow().getActivePage().getActivePart());
+      action.selectionChanged(this, site.getStructuredViewer()
+          .getSelection());
+      action.run(this);
+    }
+
+    /* @inheritDoc */
+    @Override
+    public boolean isEnabled() {
+      return true;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/57/5776a23705c2f129998c3e068f096c57bae7178d.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/57/5776a23705c2f129998c3e068f096c57bae7178d.svn-base b/eclipse-plugin/.svn/pristine/57/5776a23705c2f129998c3e068f096c57bae7178d.svn-base
new file mode 100644
index 0000000..db9c181
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/57/5776a23705c2f129998c3e068f096c57bae7178d.svn-base
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<cheatsheet title="MapReduce project run tutorial">
+	<intro>
+		<description>
+			This tutorial shows you how to run your newly created
+			MapReduce Project in one of two ways: locally as a Java
+			Application, or on a Hadoop Server.
+		</description>
+	</intro>
+	<item title="Run as Java Application">
+		<description>
+			To run your MapReduce application locally, right-click on
+			your Driver class in the Package Explorer and select <b>Run as
+			/ Java Application</b>.
+		</description>
+	</item>
+	<item title="Run on Hadoop Server">
+		<description>
+			To run your MapReduce application on a Hadoop server, right-click on
+			your Driver class in the Package Explorer and select <b>Run as
+			/ Run on Hadoop</b>.
+		</description>
+	</item>
+</cheatsheet>
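
For reference, a minimal, hypothetical Driver sketch (not part of the imported source) showing what the "Driver class" referred to in the cheat sheet above might look like against the old org.apache.hadoop.mapred API that this plugin targets; the package, class name, and the choice of identity map/reduce classes are illustrative assumptions only.

    package examples;  // hypothetical package

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.IdentityMapper;
    import org.apache.hadoop.mapred.lib.IdentityReducer;

    public class ExampleDriver {
      public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(ExampleDriver.class);
        conf.setJobName("example");

        // Identity map/reduce: records pass through unchanged
        conf.setMapperClass(IdentityMapper.class);
        conf.setReducerClass(IdentityReducer.class);

        // TextInputFormat (the default) produces LongWritable/Text pairs
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf);
      }
    }

Running this class locally ("Run as Java Application") uses the local job runner; "Run on Hadoop" packages it into a jar and submits it to the configured location.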

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/58/5808ca44b394de69114a80b2451189febb02d89d.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/58/5808ca44b394de69114a80b2451189febb02d89d.svn-base b/eclipse-plugin/.svn/pristine/58/5808ca44b394de69114a80b2451189febb02d89d.svn-base
new file mode 100644
index 0000000..a7a072f
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/58/5808ca44b394de69114a80b2451189febb02d89d.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/58/589a3675188924532a5643a8fce70d0b509477e9.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/58/589a3675188924532a5643a8fce70d0b509477e9.svn-base b/eclipse-plugin/.svn/pristine/58/589a3675188924532a5643a8fce70d0b509477e9.svn-base
new file mode 100644
index 0000000..d3608ad
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/58/589a3675188924532a5643a8fce70d0b509477e9.svn-base
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Mapper class (a class that runs the Map portion
+ * of a MapReduce job). The class is pre-filled with a template.
+ * 
+ */
+
+public class NewMapperWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private Page page;
+
+  public NewMapperWizard() {
+    setWindowTitle("New Mapper");
+  }
+
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new Page();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  public static class Page extends NewTypeWizardPage {
+    private Button isCreateMapMethod;
+
+    public Page() {
+      super(true, "Mapper");
+
+      setTitle("Mapper");
+      setDescription("Create a new Mapper implementation.");
+      setImageDescriptor(ImageLibrary.get("wizard.mapper.new"));
+    }
+
+    public void setSelection(IStructuredSelection selection) {
+      initContainerPage(getInitialJavaElement(selection));
+      initTypePage(getInitialJavaElement(selection));
+    }
+
+    @Override
+    public void createType(IProgressMonitor monitor) throws CoreException,
+        InterruptedException {
+      super.createType(monitor);
+    }
+
+    @Override
+    protected void createTypeMembers(IType newType, ImportsManager imports,
+        IProgressMonitor monitor) throws CoreException {
+      super.createTypeMembers(newType, imports, monitor);
+      imports.addImport("java.io.IOException");
+      imports.addImport("org.apache.hadoop.io.WritableComparable");
+      imports.addImport("org.apache.hadoop.io.Writable");
+      imports.addImport("org.apache.hadoop.mapred.OutputCollector");
+      imports.addImport("org.apache.hadoop.mapred.Reporter");
+      newType
+          .createMethod(
+              "public void map(WritableComparable key, Writable values, OutputCollector output, Reporter reporter) throws IOException \n{\n}\n",
+              null, false, monitor);
+    }
+
+    public void createControl(Composite parent) {
+      // super.createControl(parent);
+
+      initializeDialogUnits(parent);
+      Composite composite = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout();
+      layout.numColumns = 4;
+      composite.setLayout(layout);
+
+      createContainerControls(composite, 4);
+      createPackageControls(composite, 4);
+      createSeparator(composite, 4);
+      createTypeNameControls(composite, 4);
+      createSuperClassControls(composite, 4);
+      createSuperInterfacesControls(composite, 4);
+      // createSeparator(composite, 4);
+
+      setControl(composite);
+
+      setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+      setSuperInterfaces(Arrays
+          .asList(new String[] { "org.apache.hadoop.mapred.Mapper" }), true);
+
+      setFocus();
+      validate();
+    }
+
+    @Override
+    protected void handleFieldChanged(String fieldName) {
+      super.handleFieldChanged(fieldName);
+
+      validate();
+    }
+
+    private void validate() {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    }
+  }
+
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        openResource((IFile) page.getModifiedResource());
+        selectAndReveal(page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return page.getCreatedType().getPrimaryElement();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/5a/5a97e8870c0d4a07de2c9701c7e141046b9b0302.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/5a/5a97e8870c0d4a07de2c9701c7e141046b9b0302.svn-base b/eclipse-plugin/.svn/pristine/5a/5a97e8870c0d4a07de2c9701c7e141046b9b0302.svn-base
new file mode 100644
index 0000000..5fb0b28
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/5a/5a97e8870c0d4a07de2c9701c7e141046b9b0302.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/5c/5c5da4357f6b27b52c400168a280491fbf0bd209.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/5c/5c5da4357f6b27b52c400168a280491fbf0bd209.svn-base b/eclipse-plugin/.svn/pristine/5c/5c5da4357f6b27b52c400168a280491fbf0bd209.svn-base
new file mode 100644
index 0000000..9d9a609
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/5c/5c5da4357f6b27b52c400168a280491fbf0bd209.svn-base
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.apache.hadoop.fs.FileSystem;
+
+/**
+ * Representation of the root element containing all DFS servers. This
+ * content registers an observer on Hadoop servers so as to update itself
+ * when servers are updated.
+ */
+public class DFSLocationsRoot implements DFSContent, IHadoopServerListener {
+
+  /**
+   * The content provider this element is the root of
+   */
+  private final DFSContentProvider provider;
+
+  private Map<HadoopServer, DFSLocation> map =
+      new HashMap<HadoopServer, DFSLocation>();
+
+  /**
+   * Register a listener to track DFS location updates
+   * 
+   * @param provider the content provider this content is the root of
+   */
+  DFSLocationsRoot(DFSContentProvider provider) {
+    this.provider = provider;
+    ServerRegistry.getInstance().addListener(this);
+    this.refresh();
+  }
+
+  /*
+   * Implementation of IHadoopServerListener
+   */
+
+  /* @inheritDoc */
+  public synchronized void serverChanged(final HadoopServer location,
+      final int type) {
+
+    switch (type) {
+      case ServerRegistry.SERVER_STATE_CHANGED: {
+        this.provider.refresh(map.get(location));
+        break;
+      }
+
+      case ServerRegistry.SERVER_ADDED: {
+        DFSLocation dfsLoc = new DFSLocation(provider, location);
+        map.put(location, dfsLoc);
+        this.provider.refresh(this);
+        break;
+      }
+
+      case ServerRegistry.SERVER_REMOVED: {
+        map.remove(location);
+        this.provider.refresh(this);
+        break;
+      }
+    }
+  }
+
+  /**
+   * Recompute the map of Hadoop locations
+   */
+  private synchronized void reloadLocations() {
+    map.clear();
+    for (HadoopServer location : ServerRegistry.getInstance().getServers())
+      map.put(location, new DFSLocation(provider, location));
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return "DFS Locations";
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public synchronized DFSContent[] getChildren() {
+    return this.map.values().toArray(new DFSContent[this.map.size()]);
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return (this.map.size() > 0);
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    reloadLocations();
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+
+  public void disconnect() {
+    Thread closeThread = new Thread() {
+      /* @inheritDoc */
+      @Override
+      public void run() {
+        try {
+          System.out.printf("Closing all opened File Systems...\n");
+          FileSystem.closeAll();
+          System.out.printf("File Systems closed\n");
+
+        } catch (IOException ioe) {
+          ioe.printStackTrace();
+        }
+      }
+    };
+
+    // Wait 5 seconds for the connections to be closed
+    closeThread.start();
+    try {
+      closeThread.join(5000);
+
+    } catch (InterruptedException ie) {
+      // Ignore
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/5d/5d746fa355c0b121ffc7b3f54f23a0cf5a537cba.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/5d/5d746fa355c0b121ffc7b3f54f23a0cf5a537cba.svn-base b/eclipse-plugin/.svn/pristine/5d/5d746fa355c0b121ffc7b3f54f23a0cf5a537cba.svn-base
new file mode 100644
index 0000000..ce83b9a
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/5d/5d746fa355c0b121ffc7b3f54f23a0cf5a537cba.svn-base
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+/**
+ * DFS Content that displays a message.
+ */
+class DFSMessage implements DFSContent {
+
+  private String message;
+
+  DFSMessage(String message) {
+    this.message = message;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.message;
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    // Nothing to do
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/5f/5fe1f54fce9c2ed7ec243445461fe352b0e7852b.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/5f/5fe1f54fce9c2ed7ec243445461fe352b0e7852b.svn-base b/eclipse-plugin/.svn/pristine/5f/5fe1f54fce9c2ed7ec243445461fe352b0e7852b.svn-base
new file mode 100644
index 0000000..0abd538
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/5f/5fe1f54fce9c2ed7ec243445461fe352b0e7852b.svn-base
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
+import org.apache.hadoop.eclipse.server.ConfProp;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Path handling for the DFS browser
+ */
+public abstract class DFSPath implements DFSContent {
+
+  protected final DFSContentProvider provider;
+
+  protected HadoopServer location;
+
+  private DistributedFileSystem dfs = null;
+
+  protected final Path path;
+
+  protected final DFSPath parent;
+
+  /**
+   * For debugging purposes
+   */
+  static Logger log = Logger.getLogger(DFSPath.class.getName());
+
+  /**
+   * Create a root path representation for the given Hadoop location
+   * 
+   * @param provider the content provider this path belongs to
+   * @param location the Hadoop location
+   * @throws IOException
+   */
+  public DFSPath(DFSContentProvider provider, HadoopServer location)
+      throws IOException {
+
+    this.provider = provider;
+    this.location = location;
+    this.path = new Path("/");
+    this.parent = null;
+  }
+
+  /**
+   * Create a sub-path representation for the given parent path
+   * 
+   * @param parent
+   * @param path
+   */
+  protected DFSPath(DFSPath parent, Path path) {
+    this.provider = parent.provider;
+    this.location = parent.location;
+    this.dfs = parent.dfs;
+    this.parent = parent;
+    this.path = path;
+  }
+
+  protected void dispose() {
+    // Free the DFS connection
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    if (path.toString().equals("/")) {
+      return location.getConfProp(ConfProp.FS_DEFAULT_URI);
+
+    } else {
+      return this.path.getName();
+    }
+  }
+
+  /**
+   * Does a recursive delete of the remote directory tree at this node.
+   */
+  public void delete() {
+    try {
+      getDFS().delete(this.path, true);
+
+    } catch (IOException e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Delete file",
+          "Unable to delete file \"" + this.path + "\"\n" + e);
+    }
+  }
+
+  public DFSPath getParent() {
+    return parent;
+  }
+
+  public abstract void refresh();
+
+  /**
+   * Refresh the UI element for this content
+   */
+  public void doRefresh() {
+    provider.refresh(this);
+  }
+
+  /**
+   * Copy the DFSPath to the given local directory
+   * 
+   * @param monitor the progress monitor
+   * @param dir the local destination directory
+   */
+  public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
+      File dir);
+
+  public Path getPath() {
+    return this.path;
+  }
+
+  /**
+   * Gets a connection to the DFS
+   * 
+   * @return a connection to the DFS
+   * @throws IOException
+   */
+  DistributedFileSystem getDFS() throws IOException {
+    if (this.dfs == null) {
+      FileSystem fs = location.getDFS();
+      if (!(fs instanceof DistributedFileSystem)) {
+        ErrorMessageDialog.display("DFS Browser",
+            "The DFS Browser cannot browse anything other "
+                + "than a Distributed File System!");
+        throw new IOException("DFS Browser expects a DistributedFileSystem!");
+      }
+      this.dfs = (DistributedFileSystem) fs;
+    }
+    return this.dfs;
+  }
+
+  public abstract int computeDownloadWork();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/63/63fc2684b65445ffe30c97d9adf2c7b60d61f912.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/63/63fc2684b65445ffe30c97d9adf2c7b60d61f912.svn-base b/eclipse-plugin/.svn/pristine/63/63fc2684b65445ffe30c97d9adf2c7b60d61f912.svn-base
new file mode 100644
index 0000000..038497a
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/63/63fc2684b65445ffe30c97d9adf2c7b60d61f912.svn-base
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+public enum DFSActions {
+
+  DELETE("Delete"), REFRESH("Refresh"), DOWNLOAD("Download from DFS..."), OPEN(
+      "View"), MKDIR("Create new directory..."), UPLOAD_FILES(
+      "Upload files to DFS..."), UPLOAD_DIR("Upload directory to DFS..."), RECONNECT(
+      "Reconnect"), DISCONNECT("Disconnect");
+
+  final String title;
+
+  final String id;
+
+  private static final String PREFIX = "dfs.browser.action.";
+
+  public static DFSActions getById(String def) {
+    if (!def.startsWith(PREFIX))
+      return null;
+    return valueOf(def.substring(PREFIX.length()).toUpperCase());
+  }
+
+  DFSActions(String title) {
+    this.title = title;
+    this.id = PREFIX + this.name().toLowerCase();
+  }
+}
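
For reference, a minimal, hypothetical sketch (not part of the imported source) of how the dfs.browser.action.* definition ids declared by the enum above round-trip through getById(); the class name and main method are illustrative only, and the class sits in the same package so the package-private fields are visible.

    package org.apache.hadoop.eclipse.dfs;

    public class DFSActionsIdExample {  // hypothetical class, for illustration only
      public static void main(String[] args) {
        // Each constant's id is "dfs.browser.action." + its lower-cased name
        System.out.println(DFSActions.DELETE.id);        // dfs.browser.action.delete
        System.out.println(DFSActions.UPLOAD_FILES.id);  // dfs.browser.action.upload_files

        // getById() strips the prefix and maps the remainder back to a constant
        System.out.println(DFSActions.getById("dfs.browser.action.refresh"));  // REFRESH
        System.out.println(DFSActions.getById("some.other.id"));               // null
      }
    }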

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/64/64c6bad5ee4ce2a98049b5b055420e2b72beb0b2.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/64/64c6bad5ee4ce2a98049b5b055420e2b72beb0b2.svn-base b/eclipse-plugin/.svn/pristine/64/64c6bad5ee4ce2a98049b5b055420e2b72beb0b2.svn-base
new file mode 100644
index 0000000..af8e6c1
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/64/64c6bad5ee4ce2a98049b5b055420e2b72beb0b2.svn-base
@@ -0,0 +1,350 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.PlatformObject;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * File handling methods for the DFS
+ */
+public class DFSFile extends DFSPath implements DFSContent {
+
+  protected long length;
+
+  protected short replication;
+
+  /**
+   * Constructor to upload a file to the distributed file system
+   * 
+   * @param parent the parent DFS path
+   * @param path the path of this file on the DFS
+   * @param file the local file to upload
+   * @param monitor the progress monitor
+   */
+  public DFSFile(DFSPath parent, Path path, File file,
+      IProgressMonitor monitor) {
+
+    super(parent, path);
+    this.upload(monitor, file);
+  }
+
+  public DFSFile(DFSPath parent, Path path) {
+    super(parent, path);
+
+    try {
+      FileStatus fs = getDFS().getFileStatus(path);
+      this.length = fs.getLen();
+      this.replication = fs.getReplication();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Download and view contents of a file
+   * 
+   * @return an InputStream for the file
+   */
+  public InputStream open() throws IOException {
+
+    return getDFS().open(this.path);
+  }
+
+  /**
+   * Download this file to the local file system. This creates a download
+   * status monitor.
+   * 
+   * @param file the local destination file
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   * 
+   * @deprecated
+   */
+  public void downloadToLocalFile(final File file)
+      throws InvocationTargetException, InterruptedException {
+
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException {
+
+            DFSFile.this.downloadToLocalFile(monitor, file);
+          }
+        });
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+
+    File dfsPath = new File(this.getPath().toString());
+    File destination = new File(dir, dfsPath.getName());
+
+    if (destination.exists()) {
+      boolean answer =
+          MessageDialog.openQuestion(null, "Overwrite existing local file?",
+              "The file you are attempting to download from the DFS "
+                  + this.getPath()
+                  + " already exists in your local directory as "
+                  + destination + ".\n" + "Overwrite the existing file?");
+      if (!answer)
+        return;
+    }
+
+    try {
+      this.downloadToLocalFile(monitor, destination);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Download to local file system",
+          "Downloading of file \"" + this.path + "\" to local directory \""
+              + dir + "\" has failed.\n" + e);
+    }
+  }
+
+  /**
+   * Provides a detailed string for this file
+   * 
+   * @return the string formatted as
+   *         <tt>&lt;filename&gt; (&lt;size&gt;, r&lt;replication&gt;)</tt>
+   */
+  public String toDetailedString() {
+    final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
+    int unit = 0;
+    double l = this.length;
+    while ((l >= 1024.0) && (unit < units.length - 1)) { // stay within units[]
+      unit += 1;
+      l /= 1024.0;
+    }
+
+    return String.format("%s (%.1f %s, r%d)", super.toString(), l,
+        units[unit], this.replication);
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.path.toString();
+  }
+
+  /*
+   * 
+   */
+
+  /**
+   * Download the DfsFile to a local file. Use the given monitor to report
+   * the status of the operation.
+   * 
+   * @param monitor the status monitor
+   * @param file the local destination file
+   * @throws InvocationTargetException
+   */
+  public void downloadToLocalFile(IProgressMonitor monitor, File file)
+      throws InvocationTargetException {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Download file " + this.path);
+
+    BufferedOutputStream ostream = null;
+    DataInputStream istream = null;
+
+    try {
+      istream = getDFS().open(this.path);
+      ostream = new BufferedOutputStream(new FileOutputStream(file));
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      throw new InvocationTargetException(e);
+
+    } finally {
+      // Clean all opened resources
+      if (istream != null) {
+        try {
+          istream.close();
+        } catch (IOException e) {
+          e.printStackTrace();
+          // nothing we can do here
+        }
+      }
+      try {
+        if (ostream != null)
+          ostream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+    }
+  }
+
+  /**
+   * Upload a local file to this file on the distributed file system
+   * 
+   * @param monitor
+   * @param file
+   */
+  public void upload(IProgressMonitor monitor, File file) {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Upload file " + this.path);
+
+    BufferedInputStream istream = null;
+    DataOutputStream ostream = null;
+
+    try {
+      istream = new BufferedInputStream(new FileInputStream(file));
+      ostream = getDFS().create(this.path);
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      ErrorMessageDialog.display(String.format(
+          "Unable to upload file %s to %s", file, this.path), e
+          .getLocalizedMessage());
+
+    } finally {
+      try {
+        if (istream != null)
+          istream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+      try {
+        if (ostream != null)
+          ostream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    getParent().refresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int computeDownloadWork() {
+    return 1 + (int) (this.length / 1024);
+  }
+
+  /**
+   * Creates an adapter for the file to open it in the Editor
+   * 
+   * @return the IStorage
+   */
+  public IStorage getIStorage() {
+    return new IStorageAdapter();
+  }
+
+  /**
+   * IStorage adapter to open the file in the Editor
+   */
+  private class IStorageAdapter extends PlatformObject implements IStorage {
+
+    /* @inheritDoc */
+    public InputStream getContents() throws CoreException {
+      try {
+        return DFSFile.this.open();
+
+      } catch (IOException ioe) {
+        throw new CoreException(new Status(Status.ERROR,
+                Activator.PLUGIN_ID, 0, "Unable to open file \""
+                + DFSFile.this.path + "\"", ioe));
+      }
+    }
+
+    /* @inheritDoc */
+    public IPath getFullPath() {
+      return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
+    }
+
+    /* @inheritDoc */
+    public String getName() {
+      return DFSFile.this.path.getName();
+    }
+
+    /* @inheritDoc */
+    public boolean isReadOnly() {
+      return true;
+    }
+
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/66/664831887585006c954cc12b539d9d9ee1bdd9cc.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/66/664831887585006c954cc12b539d9d9ee1bdd9cc.svn-base b/eclipse-plugin/.svn/pristine/66/664831887585006c954cc12b539d9d9ee1bdd9cc.svn-base
new file mode 100644
index 0000000..38cf1cc
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/66/664831887585006c954cc12b539d9d9ee1bdd9cc.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/66/66d57de66c7d7cdbe265920c3722a7757851a5ee.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/66/66d57de66c7d7cdbe265920c3722a7757851a5ee.svn-base b/eclipse-plugin/.svn/pristine/66/66d57de66c7d7cdbe265920c3722a7757851a5ee.svn-base
new file mode 100644
index 0000000..5db0bc5
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/66/66d57de66c7d7cdbe265920c3722a7757851a5ee.svn-base
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+
+/**
+ * Action corresponding to creating a new MapReduce Server.
+ */
+
+public class NewLocationAction extends Action {
+  public NewLocationAction() {
+    setText("New Hadoop location...");
+    setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
+  }
+
+  @Override
+  public void run() {
+    WizardDialog dialog = new WizardDialog(null, new Wizard() {
+      private HadoopLocationWizard page = new HadoopLocationWizard();
+
+      @Override
+      public void addPages() {
+        super.addPages();
+        setWindowTitle("New Hadoop location...");
+        addPage(page);
+      }
+
+      @Override
+      public boolean performFinish() {
+        page.performFinish();
+        return true;
+      }
+
+    });
+
+    dialog.create();
+    dialog.setBlockOnOpen(true);
+    dialog.open();
+
+    super.run();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/69/69015dbc918e9119bcb3defb71b7bf0e732c280f.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/69/69015dbc918e9119bcb3defb71b7bf0e732c280f.svn-base b/eclipse-plugin/.svn/pristine/69/69015dbc918e9119bcb3defb71b7bf0e732c280f.svn-base
new file mode 100644
index 0000000..2fa7f14
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/69/69015dbc918e9119bcb3defb71b7bf0e732c280f.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6b/6bb21284e8e391c830466ab78376618d32fc8831.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6b/6bb21284e8e391c830466ab78376618d32fc8831.svn-base b/eclipse-plugin/.svn/pristine/6b/6bb21284e8e391c830466ab78376618d32fc8831.svn-base
new file mode 100644
index 0000000..ca7cfe8
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6b/6bb21284e8e391c830466ab78376618d32fc8831.svn-base
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<cheatsheet title="Open Browser">
+  <intro>
+    <description>This cheat sheet launches a browser to the Hadoop website.</description>
+  </intro>  
+  <item title="Open Browser">
+     <description>
+					Go to http://hadoop.apache.org/core/, and follow
+					links to download the latest stable distribution of
+					Hadoop.
+
+					Use the following embedded command to launch the
+					Hadoop Web site in a browser.</description>
+     <command serialization=
+        "org.eclipse.ui.browser.openBrowser(url=http://hadoop.apache.org/core)"/>
+  </item>
+</cheatsheet>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6c/6ccae6864d03b3bd5fcc872c2c62c2d2b24069df.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6c/6ccae6864d03b3bd5fcc872c2c62c2d2b24069df.svn-base b/eclipse-plugin/.svn/pristine/6c/6ccae6864d03b3bd5fcc872c2c62c2d2b24069df.svn-base
new file mode 100644
index 0000000..bae2815
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6c/6ccae6864d03b3bd5fcc872c2c62c2d2b24069df.svn-base
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<compositeCheatsheet name="IBM MapReduce Tools for Eclipse">
+	<taskGroup name="Develop Hadoop Applications" kind="set">
+		<intro
+			href="http://www.alphaworks.ibm.com/tech/mapreducetools">
+			IBM(R) MapReduce Tools for Eclipse enables you to write
+			distributed applications based on the MapReduce paradigm
+			using the Apache Hadoop runtime. This cheat sheet will walk
+			you through the steps needed to write a MapReduce
+			application and run it on a Hadoop server.
+		</intro>
+		<onCompletion>
+
+		</onCompletion>
+		<taskGroup name="Initial Setup" kind="sequence" skip="true">
+			<intro>
+				This task takes you through the steps to setup the
+				Hadoop environment with the MapReduce Tools. If you
+				already have Hadoop installed and linked to Eclipse, you
+				can skip this task.
+			</intro>
+			<onCompletion>
+				Congratulations! You have now installed Hadoop on your
+				computer and linked it with the MapReduce Tools.
+			</onCompletion>
+			<task kind="cheatsheet"
+				name="Download and unzip Apache Hadoop distribution">
+				<intro>
+					Hadoop must be downloaded to a place where Eclipse
+					can access its libraries. This task covers the steps
+					needed to do so.
+				</intro>
+				<param name="showIntro" value="false" />
+				<param name="path" value="Setup.xml" />
+				<onCompletion>
+					The plugin currently supports Hadoop v0.7.2 through
+					0.12.2. Now click the topmost link for a version that
+					you feel comfortable installing.
+				</onCompletion>
+			</task>
+			<task kind="cheatsheet"
+				name="Specify path to Apache Hadoop distribution">
+				...
+				<intro>
+					This tutorial informs you how to set the default
+					Hadoop directory for the plugin.
+				</intro>
+				<param name="showIntro" value="false" />
+			 	<param name="path" value="SetHadoopPath.xml" />
+			</task>
+		</taskGroup>
+		<taskGroup name="Create and run a MapReduce project"
+			kind="sequence" skip="true">
+			<intro>
+				This section walks you through the steps to create and
+				run your MapReduce project.
+			</intro>
+
+			<task kind="cheatsheet" name="Create a MapReduce project"
+				skip="true">
+				<intro>
+					This tutorial guides you through the creation of a
+					simple MapReduce project with three MapReduce
+					classes: a Mapper, a Reducer, and a Driver.
+				</intro>
+				<param name="showIntro" value="false" />
+				<param name="path" value="CreateProj.xml" />
+				<onCompletion>
+					Congratulations! You have now mastered the steps for
+					creating a Hadoop project.
+				</onCompletion>
+			</task>
+			<task kind="cheatsheet"
+				name="Run a MapReduce application">
+				<param name="path" value="RunProj.xml" />
+				<onCompletion>
+					Congratulations! You have now mastered the steps for
+					implementing a Hadoop application.
+				</onCompletion>
+			</task>
+
+		</taskGroup>
+
+		<taskGroup name="Using a MapReduce cluster" kind="set"
+			skip="true">
+			<intro>
+				The MapReduce Tools for Eclipse plugin lets you 
+				browse and upload files to the DFS of a MapReduce cluster.
+			</intro>
+			<onCompletion>
+				Congratulations!  You have completed the tutorials on using a
+				MapReduce Cluster.
+			</onCompletion>
+			<task kind="cheatsheet"
+				name="Connect to a MapReduce cluster" skip="true">
+				<intro>
+					This tutorial explains how to show files in the DFS of a
+					MapReduce cluster.
+				</intro>
+				<param name="showIntro" value="false" />
+				<param name="path" value="ConnectDFS.xml" />
+			</task>
+			<task kind="cheatsheet" id="viewFiles"
+				name="Viewing file contents on the Hadoop Distributed File System (HDFS)">
+				<intro>
+					Simply double-click on any file in the DFS in the Project
+					Explorer view.
+				</intro>
+			</task>
+			<task kind="cheatsheet" 
+				name="Transfer files to the Hadoop Distributed File System (HDFS)">
+				<intro>
+					Right-click on an existing directory in the DFS.<br />
+					Choose the <b>Import from local directory</b> option.
+					<br />
+					Note that files can only be uploaded to the HDFS at this time.
+				</intro>
+			</task>
+		</taskGroup>
+	</taskGroup>
+</compositeCheatsheet>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6c/6cfe9e5c78300cc232d1e5942d76ca1bc975120e.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6c/6cfe9e5c78300cc232d1e5942d76ca1bc975120e.svn-base b/eclipse-plugin/.svn/pristine/6c/6cfe9e5c78300cc232d1e5942d76ca1bc975120e.svn-base
new file mode 100644
index 0000000..828e205
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6c/6cfe9e5c78300cc232d1e5942d76ca1bc975120e.svn-base
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+import java.io.File;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
+import org.eclipse.core.resources.IResource;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.ICompilationUnit;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.ui.jarpackager.IJarExportRunnable;
+import org.eclipse.jdt.ui.jarpackager.JarPackageData;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * Methods for interacting with the jar file containing the
+ * Mapper/Reducer/Driver classes for a MapReduce job.
+ */
+
+public class JarModule implements IRunnableWithProgress {
+
+  static Logger log = Logger.getLogger(JarModule.class.getName());
+
+  private IResource resource;
+
+  private File jarFile;
+
+  public JarModule(IResource resource) {
+    this.resource = resource;
+  }
+
+  public String getName() {
+    return resource.getProject().getName() + "/" + resource.getName();
+  }
+
+  /**
+   * Creates a JAR file containing the resource this module wraps (a Java
+   * class with a main()) and all associated resources
+   * 
+   * @param monitor the progress monitor
+   */
+  public void run(IProgressMonitor monitor) {
+
+    log.fine("Build jar");
+    JarPackageData jarrer = new JarPackageData();
+
+    jarrer.setExportJavaFiles(true);
+    jarrer.setExportClassFiles(true);
+    jarrer.setExportOutputFolders(true);
+    jarrer.setOverwrite(true);
+
+    try {
+      // IJavaProject project =
+      // (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
+
+      // check this is the case before letting this method get called
+      Object element = resource.getAdapter(IJavaElement.class);
+      IType type = ((ICompilationUnit) element).findPrimaryType();
+      jarrer.setManifestMainClass(type);
+
+      // Create a temporary JAR file name
+      File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+      String prefix =
+          String.format("%s_%s-", resource.getProject().getName(), resource
+              .getName());
+      File jarFile = File.createTempFile(prefix, ".jar", baseDir);
+      jarrer.setJarLocation(new Path(jarFile.getAbsolutePath()));
+
+      jarrer.setElements(resource.getProject().members(IResource.FILE));
+      IJarExportRunnable runnable =
+          jarrer.createJarExportRunnable(Display.getDefault()
+              .getActiveShell());
+      runnable.run(monitor);
+
+      this.jarFile = jarFile;
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Allow the retrieval of the resulting JAR file
+   * 
+   * @return the generated JAR file
+   */
+  public File getJarFile() {
+    return this.jarFile;
+  }
+
+  /**
+   * Static helper that creates a JAR package for the given resource while
+   * showing a progress bar
+   * 
+   * @param resource the resource to package
+   * @return the created JAR file, or null if the packaging failed
+   */
+  public static File createJarPackage(IResource resource) {
+
+    JarModule jarModule = new JarModule(resource);
+    try {
+      PlatformUI.getWorkbench().getProgressService().run(false, true,
+          jarModule);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      return null;
+    }
+
+    File jarFile = jarModule.getJarFile();
+    if (jarFile == null) {
+      ErrorMessageDialog.display("Run on Hadoop",
+          "Unable to create or locate the JAR file for the Job");
+      return null;
+    }
+
+    return jarFile;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6d/6dd3931dffcb1ca81303d6d84c740555c85da505.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6d/6dd3931dffcb1ca81303d6d84c740555c85da505.svn-base b/eclipse-plugin/.svn/pristine/6d/6dd3931dffcb1ca81303d6d84c740555c85da505.svn-base
new file mode 100644
index 0000000..8d4b2df
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6d/6dd3931dffcb1ca81303d6d84c740555c85da505.svn-base
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+  /**
+   * The plug-in ID
+   */
+  public static final String PLUGIN_ID = "org.apache.hadoop.eclipse";
+
+  /**
+   * The shared unique instance (singleton)
+   */
+  private static Activator plugin;
+
+  /**
+   * Constructor
+   */
+  public Activator() {
+    synchronized (Activator.class) {
+      if (plugin != null) {
+        // Not a singleton!?
+        throw new RuntimeException("Activator for " + PLUGIN_ID
+            + " is not a singleton");
+      }
+      plugin = this;
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void start(BundleContext context) throws Exception {
+    super.start(context);
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void stop(BundleContext context) throws Exception {
+    ServerRegistry.getInstance().dispose();
+    plugin = null;
+    super.stop(context);
+  }
+
+  /**
+   * Returns the shared unique instance (singleton)
+   * 
+   * @return the shared unique instance (singleton)
+   */
+  public static Activator getDefault() {
+    return plugin;
+  }
+
+}
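As a usage note, other classes reach this singleton through getDefault(); the fragment below mirrors how JarModule (earlier in this commit) resolves a scratch directory from the plug-in state location. It is a minimal sketch, not new functionality.

import java.io.File;

public class StateLocationExample {
  // The plug-in's private state directory is a convenient place for
  // temporary files such as generated JARs.
  static File pluginScratchDir() {
    return Activator.getDefault().getStateLocation().toFile();
  }
}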

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6d/6df3199ab18b129dde91c5e96629f7c703979266.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6d/6df3199ab18b129dde91c5e96629f7c703979266.svn-base b/eclipse-plugin/.svn/pristine/6d/6df3199ab18b129dde91c5e96629f7c703979266.svn-base
new file mode 100644
index 0000000..6fee03b
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/6d/6df3199ab18b129dde91c5e96629f7c703979266.svn-base differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6e/6e73bf69ec52a59488dd1c099873507ecda6f375.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6e/6e73bf69ec52a59488dd1c099873507ecda6f375.svn-base b/eclipse-plugin/.svn/pristine/6e/6e73bf69ec52a59488dd1c099873507ecda6f375.svn-base
new file mode 100644
index 0000000..7866c30
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6e/6e73bf69ec52a59488dd1c099873507ecda6f375.svn-base
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Driver class (a class that runs a MapReduce job).
+ * 
+ */
+
+public class NewDriverWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private NewDriverWizardPage page;
+
+  /*
+   * @Override public boolean performFinish() { }
+   */
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // Creation of the new type failed; report the error
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // Type creation was interrupted (e.g. cancelled by the user)
+      e.printStackTrace();
+    }
+  }
+
+  public NewDriverWizard() {
+    setWindowTitle("New MapReduce Driver");
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new NewDriverWizardPage();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  /**
+   * Performs any actions appropriate in response to the user having
+   * pressed the Finish button, or refuses if finishing now is not
+   * permitted.
+   */
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /**
+   * Delegates to {@link #run(IProgressMonitor)}, which asks the wizard page
+   * to create the new Driver type
+   */
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return page.getCreatedType().getPrimaryElement();
+  }
+}
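A hedged sketch of how the wizard can be exercised: in the plugin it is presumably contributed through the org.eclipse.ui.newWizards extension point, but it can also be opened programmatically with a JFace WizardDialog, for example from a test or an action. The shell and selection arguments are assumed to come from the caller.

import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;

public class OpenDriverWizardSketch {
  static void open(Shell shell, IStructuredSelection selection) {
    NewDriverWizard wizard = new NewDriverWizard();
    // init() installs the NewDriverWizardPage and passes it the selection
    wizard.init(PlatformUI.getWorkbench(), selection);
    // blocks until the user presses Finish (performFinish) or Cancel
    new WizardDialog(shell, wizard).open();
  }
}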

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/6f/6f8e40c01e9a888e1052a3559f304733a0a86377.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/6f/6f8e40c01e9a888e1052a3559f304733a0a86377.svn-base b/eclipse-plugin/.svn/pristine/6f/6f8e40c01e9a888e1052a3559f304733a0a86377.svn-base
new file mode 100644
index 0000000..9e5fd14
--- /dev/null
+++ b/eclipse-plugin/.svn/pristine/6f/6f8e40c01e9a888e1052a3559f304733a0a86377.svn-base
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+
+<cheatsheet title="Set default Hadoop path tutorial">
+	<intro>
+		<description>
+			This tutorial shows you how to set the default Hadoop
+			installation directory for the plugin.
+		</description>
+	</intro>
+	<item title="Open Plugin Preferences window">
+		<description>
+			To set the default Hadoop directory, open the plugin
+			preferences from the menu option
+			<b>Window > Preferences</b>.  <br />
+			Go to the <b>Hadoop Home Directory</b>
+			preference, and enter the installation directory there.
+
+			Use the following embedded command to open the Preferences
+			window:
+		</description>
+
+		<action pluginId="org.eclipse.jdt.ui"
+			class="org.eclipse.ui.internal.OpenPreferencesAction" />
+	</item>
+</cheatsheet>
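For completeness, a hypothetical sketch of how code might read the preference this cheat sheet configures. The actual preference key used by the plugin is not visible in this file, so the constant below is a placeholder, not the plugin's real key.

import org.eclipse.jface.preference.IPreferenceStore;

public class HadoopHomeSketch {
  private static final String HADOOP_HOME_KEY = "hadoop.home.dir"; // placeholder key

  static String hadoopHomeDirectory() {
    // AbstractUIPlugin exposes a shared preference store per plug-in
    IPreferenceStore store = Activator.getDefault().getPreferenceStore();
    return store.getString(HADOOP_HOME_KEY);
  }
}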

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/a1719e04/eclipse-plugin/.svn/pristine/70/708aca1fb60f08ce8eca325d3dd5e0142d110df8.svn-base
----------------------------------------------------------------------
diff --git a/eclipse-plugin/.svn/pristine/70/708aca1fb60f08ce8eca325d3dd5e0142d110df8.svn-base b/eclipse-plugin/.svn/pristine/70/708aca1fb60f08ce8eca325d3dd5e0142d110df8.svn-base
new file mode 100644
index 0000000..e9b5a30
Binary files /dev/null and b/eclipse-plugin/.svn/pristine/70/708aca1fb60f08ce8eca325d3dd5e0142d110df8.svn-base differ