Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/08/16 22:43:16 UTC

svn commit: r566838 [3/4] - in /lucene/hadoop/trunk: ./ src/contrib/ src/contrib/eclipse-plugin/ src/contrib/eclipse-plugin/.settings/ src/contrib/eclipse-plugin/META-INF/ src/contrib/eclipse-plugin/resources/ src/contrib/eclipse-plugin/src/ src/contri...

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,203 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.IOException;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.eclipse.core.resources.ResourcesPlugin;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IAdaptable;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.ILabelProvider;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.ITreeContentProvider;
+import org.eclipse.jface.viewers.StructuredViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.ui.ISharedImages;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.model.IWorkbenchAdapter;
+
+/**
+ * Content and label provider for viewing the DFS in a tree viewer
+ */
+public class DFSContentProvider implements ITreeContentProvider,
+    ILabelProvider {
+
+  /**
+   * The viewer that displays this tree content
+   */
+  private Viewer viewer;
+
+  private ImageDescriptor hadoopImage;
+
+  private ImageDescriptor folderImage;
+
+  private ImageDescriptor fileImage;
+
+  private ImageDescriptor dfsImage;
+
+  public DFSContentProvider() {
+    try {
+      hadoopImage =
+          ImageDescriptor.createFromURL((FileLocator.toFileURL(FileLocator
+              .find(Activator.getDefault().getBundle(), new Path(
+                  "resources/hadoop_small.gif"), null))));
+      dfsImage =
+          ImageDescriptor.createFromURL((FileLocator.toFileURL(FileLocator
+              .find(Activator.getDefault().getBundle(), new Path(
+                  "resources/files.gif"), null))));
+    } catch (IOException e) {
+      // Image resources are optional; getImage() guards against the
+      // descriptors being null.
+      e.printStackTrace();
+    }
+  }
+
+  public Object[] getChildren(Object parentElement) {
+    if (parentElement instanceof DummyWorkspace) {
+      return ResourcesPlugin.getWorkspace().getRoot().getProjects();
+    } else if (parentElement instanceof DFS) {
+      return ServerRegistry.getInstance().getServers().toArray();
+    } else if (parentElement instanceof HadoopServer) {
+      return new Object[] { new DfsFolder((HadoopServer) parentElement, "/",
+          viewer) };
+    } else if (parentElement instanceof DfsFolder) {
+      return ((DfsFolder) parentElement).getChildren();
+    }
+
+    return new Object[0];
+  }
+
+  public Object getParent(Object element) {
+    if (element instanceof DfsPath) {
+      return ((DfsPath) element).getParent();
+    } else if (element instanceof HadoopServer) {
+      return dfs;
+    } else {
+      return null;
+    }
+  }
+
+  public boolean hasChildren(Object element) {
+    return (element instanceof HadoopServer)
+        || (element instanceof DfsFolder) || (element instanceof DFS)
+        || (element instanceof DummyWorkspace);
+  }
+
+  public class DFS {
+    public DFS() {
+      ServerRegistry.getInstance().addListener(new IHadoopServerListener() {
+        public void serverChanged(final HadoopServer location, final int type) {
+          if (viewer != null) {
+            Display.getDefault().syncExec(new Runnable() {
+              public void run() {
+                if (type == ServerRegistry.SERVER_STATE_CHANGED) {
+                  ((StructuredViewer) viewer).refresh(location);
+                } else {
+                  ((StructuredViewer) viewer).refresh(ResourcesPlugin
+                      .getWorkspace().getRoot());
+                }
+              }
+            });
+          }
+        }
+      });
+    }
+
+    @Override
+    public String toString() {
+      return "MapReduce DFS";
+    }
+  }
+
+  private final DFS dfs = new DFS();
+
+  private final Object workspace = new DummyWorkspace();
+
+  private static class DummyWorkspace {
+    @Override
+    public String toString() {
+      return "Workspace";
+    }
+  };
+
+  public Object[] getElements(final Object inputElement) {
+    return ServerRegistry.getInstance().getServers().toArray();
+  }
+
+  public void dispose() {
+
+  }
+
+  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+    this.viewer = viewer;
+  }
+
+  public Image getImage(Object element) {
+    if (element instanceof DummyWorkspace) {
+      IWorkbenchAdapter a =
+          (IWorkbenchAdapter) ((IAdaptable) ResourcesPlugin.getWorkspace()
+              .getRoot()).getAdapter(IWorkbenchAdapter.class);
+      return a.getImageDescriptor(ResourcesPlugin.getWorkspace().getRoot())
+          .createImage();
+    } else if (element instanceof DFS) {
+      return (dfsImage == null) ? null : dfsImage.createImage(true);
+    } else if (element instanceof HadoopServer) {
+      return (hadoopImage == null) ? null : hadoopImage.createImage(true);
+    } else if (element instanceof DfsFolder) {
+      return PlatformUI.getWorkbench().getSharedImages().getImageDescriptor(
+          ISharedImages.IMG_OBJ_FOLDER).createImage();
+    } else if (element instanceof DfsFile) {
+      return PlatformUI.getWorkbench().getSharedImages().getImageDescriptor(
+          ISharedImages.IMG_OBJ_FILE).createImage();
+    }
+
+    return null;
+  }
+
+  public String getText(Object element) {
+    if (element instanceof DummyWorkspace) {
+      IWorkbenchAdapter a =
+          (IWorkbenchAdapter) ((IAdaptable) ResourcesPlugin.getWorkspace()
+              .getRoot()).getAdapter(IWorkbenchAdapter.class);
+      return a.getLabel(ResourcesPlugin.getWorkspace().getRoot());
+    } else {
+      return element.toString();
+    }
+  }
+
+  public void addListener(ILabelProviderListener listener) {
+
+  }
+
+  public boolean isLabelProperty(Object element, String property) {
+    return false;
+  }
+
+  public void removeListener(ILabelProviderListener listener) {
+
+  }
+}

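For context, a combined content/label provider like this one is typically attached to a JFace TreeViewer from a view part. A minimal sketch (the enclosing ViewPart and its parent Composite are assumed, not part of this commit):

    // Sketch: wiring DFSContentProvider into a TreeViewer
    TreeViewer treeViewer = new TreeViewer(parent, SWT.MULTI | SWT.V_SCROLL);
    DFSContentProvider provider = new DFSContentProvider();
    treeViewer.setContentProvider(provider); // tree structure (getChildren/getParent)
    treeViewer.setLabelProvider(provider);   // text and images (getText/getImage)
    // setInput() triggers inputChanged(), handing the viewer to the provider
    treeViewer.setInput(ResourcesPlugin.getWorkspace().getRoot());
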
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.reflect.InvocationTargetException;
+
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.ui.PlatformUI;
+
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSchException;
+
+/**
+ * File handling methods for the DFS
+ */
+public class DfsFile extends DfsPath {
+
+  public DfsFile(DfsPath parent, String path) {
+    super(parent, path);
+  }
+
+  /**
+   * Download the contents of a DFS file to a local temporary file so it
+   * can be viewed. Files larger than 1 MB prompt the user before opening.
+   * 
+   * @return a FileInputStream on the local copy, or null if the user
+   *         declined to open a large file
+   */
+  public FileInputStream open() throws JSchException, IOException,
+      InvocationTargetException, InterruptedException {
+
+    File tempFile =
+        File.createTempFile("hadoop" + System.currentTimeMillis(), "tmp");
+    tempFile.deleteOnExit();
+
+    this.downloadToLocalFile(tempFile);
+
+    // file size greater than 1 MB
+    if (tempFile.length() > 1024 * 1024) {
+      boolean answer =
+          MessageDialog.openQuestion(null, "Show large file from DFS?",
+              "The file you are attempting to download from the DFS, "
+                  + this.getPath() + ", is over 1 MB in size.\n"
+                  + "Opening it may cause performance problems."
+                  + " The file has already been saved to "
+                  + tempFile.getAbsolutePath()
+                  + ", so you can also open it there with your favourite"
+                  + " editor. Continue opening the file in Eclipse?");
+      if (!answer) {
+        return null;
+      }
+    }
+
+    return new FileInputStream(tempFile);
+  }
+
+  public void downloadToLocalFile(File localFile) throws JSchException,
+      IOException, InvocationTargetException, InterruptedException {
+
+    final ChannelExec exec =
+        exec(" dfs " + DfsFolder.s_whichFS + " -cat " + getPath());
+
+    final OutputStream os =
+        new BufferedOutputStream(new FileOutputStream(localFile));
+
+    try {
+      PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+          new IRunnableWithProgress() {
+            public void run(IProgressMonitor monitor)
+                throws InvocationTargetException {
+              try {
+                monitor.beginTask("View file from Distributed File System",
+                    IProgressMonitor.UNKNOWN);
+                exec.connect();
+                BufferedInputStream stream =
+                    new BufferedInputStream(exec.getInputStream());
+
+                byte[] buffer = new byte[1024];
+                int bytes;
+
+                while ((bytes = stream.read(buffer)) >= 0) {
+                  if (monitor.isCanceled()) {
+                    os.close();
+                    return;
+                  }
+
+                  monitor.worked(1);
+                  os.write(buffer, 0, bytes);
+                }
+
+                monitor.done();
+              } catch (Exception e) {
+                throw new InvocationTargetException(e);
+              }
+            }
+          });
+    } finally {
+      if (exec.isConnected()) {
+        exec.disconnect();
+      }
+      os.close();
+    }
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void downloadToLocalDirectory(String localDirectory)
+      throws InvocationTargetException, JSchException, InterruptedException,
+      IOException {
+
+    File dir = new File(localDirectory);
+    if (!dir.exists() || !dir.isDirectory())
+      return; // TODO display error message
+
+    File dfsPath = new File(this.getPath());
+    File destination = new File(dir, dfsPath.getName());
+
+    if (destination.exists()) {
+      boolean answer =
+          MessageDialog.openQuestion(null, "Overwrite existing local file?",
+              "The file you are attempting to download from the DFS "
+                  + this.getPath()
+                  + ", already exists in your local directory as "
+                  + destination + ".\n" + "Overwrite the existing file?");
+      if (!answer)
+        return;
+    }
+
+    this.downloadToLocalFile(destination);
+  }
+
+}

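For illustration, callers of DfsFile.open() must handle the null return that signals the user declined a large file. A hypothetical helper (readDfsFile is an assumption, not part of this commit; imports for ByteArrayOutputStream etc. are elided):

    // Sketch: read a DFS file into memory via DfsFile.open()
    static String readDfsFile(DfsFile file) throws Exception {
      FileInputStream in = file.open();
      if (in == null) {
        return null; // the user declined to open a large file
      }
      try {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[4096];
        int n;
        while ((n = in.read(buffer)) >= 0) {
          out.write(buffer, 0, n);
        }
        return out.toString();
      } finally {
        in.close();
      }
    }
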
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,324 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.lang.reflect.InvocationTargetException;
+import java.rmi.dgc.VMID;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.SubProgressMonitor;
+import org.eclipse.core.runtime.jobs.Job;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.ui.PlatformUI;
+
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.SftpException;
+
+/**
+ * Folder handling methods for the DFS
+ */
+
+public class DfsFolder extends DfsPath {
+
+  protected final static String s_whichFS = ""; // "-fs local";
+
+  static Logger log = Logger.getLogger(DfsFolder.class.getName());
+
+  private Object[] children;
+
+  private boolean loading = false;
+
+  protected DfsFolder(HadoopServer location, String path, Viewer viewer) {
+    super(location, path, viewer);
+  }
+
+  private DfsFolder(DfsPath parent, String path) {
+    super(parent, path);
+  }
+
+  public Object[] getChildren() {
+    if (children == null) {
+      doRefresh();
+      return new Object[] { "Loading..." };
+    } else {
+      return children;
+    }
+  }
+
+  /**
+   * Forces a refresh of the items in the current DFS node.
+   */
+  @Override
+  public void doRefresh() {
+    new Job("Refresh DFS Children") {
+      @Override
+      protected IStatus run(IProgressMonitor monitor) {
+        try {
+          ChannelExec channel =
+              exec(" dfs " + s_whichFS + " -ls " + getPath());
+          InputStream is = channel.getInputStream();
+          BufferedReader in =
+              new BufferedReader(new InputStreamReader(
+                  new BufferedInputStream(is)));
+
+          if (!channel.isConnected()) {
+            channel.connect();
+          }
+
+          try {
+            // initial "found n items" line ignorable
+            if (in.readLine() == null) {
+              children =
+                  new Object[] { "An error occurred: empty result from dfs -ls" };
+            }
+
+            String line;
+            List<DfsPath> children = new ArrayList<DfsPath>();
+            while ((line = in.readLine()) != null) {
+              String[] parts = line.split("\t");
+
+              for (int i = 0; i < parts.length; i++) {
+                log.fine(parts[i]);
+              }
+
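+              // The parser assumes tab-separated "dfs -ls" output in which
+              // column 0 is the path and column 1 is "<dir>" for
+              // directories; anything else is treated as a file.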
+              if (parts[1].equals("<dir>")) {
+                children.add(new DfsFolder(DfsFolder.this, parts[0]));
+              } else {
+                children.add(new DfsFile(DfsFolder.this, parts[0]));
+              }
+            }
+
+            DfsFolder.this.children = children.toArray();
+
+            DfsFolder.super.doRefresh();
+
+            return Status.OK_STATUS;
+          } finally {
+            if (channel.isConnected()) {
+              channel.disconnect();
+            }
+          }
+        } catch (Exception e) {
+          e.printStackTrace();
+          return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
+              "Refreshing DFS node failed: " + e.getLocalizedMessage(), e);
+        }
+      }
+    }.schedule();
+  }
+
+  /**
+   * Does a recursive delete of the remote directory tree at this node.
+   */
+  @Override
+  public void delete() throws JSchException {
+    doExec("dfs " + s_whichFS + " -rmr " + getPath());
+  }
+
+  /**
+   * Upload a local directory and its contents to the remote DFS
+   * 
+   * @param directory source directory to upload
+   * @throws SftpException
+   * @throws JSchException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  public void put(final String directory) throws SftpException,
+      JSchException, InvocationTargetException, InterruptedException {
+    ProgressMonitorDialog progress =
+        new ProgressMonitorDialog((Display.getCurrent() == null) ? null
+            : Display.getCurrent().getActiveShell());
+    progress.setCancelable(true);
+
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException, InterruptedException {
+            String guid = new VMID().toString().replace(':', '_');
+
+            monitor.beginTask("Secure Copy", 100);
+            scp(directory, "/tmp/hadoop_scp_" + guid,
+                new SubProgressMonitor(monitor, 60));
+
+            try {
+              SubProgressMonitor sub = new SubProgressMonitor(monitor, 1);
+              if (monitor.isCanceled()) {
+                return;
+              }
+
+              final File dir = new File(directory);
+
+              sub.beginTask("Move files from staging server to DFS", 1);
+              ChannelExec exec =
+                  exec(" dfs " + s_whichFS
+                      + " -moveFromLocal /tmp/hadoop_scp_" + guid + " \""
+                      + getPath() + "/" + dir.getName() + "\"");
+              BufferedReader reader =
+                  new BufferedReader(new InputStreamReader(
+                      new BufferedInputStream(exec.getInputStream())));
+
+              if (!monitor.isCanceled()) {
+                exec.connect();
+                reader.readLine(); // block until the move command responds
+                sub.worked(1);
+              }
+
+              if (exec.isConnected()) {
+                exec.disconnect();
+              }
+
+              sub.done();
+
+              monitor.done();
+              doRefresh();
+            } catch (Exception e) {
+              log.log(Level.SEVERE, "", e);
+              throw new InvocationTargetException(e);
+            }
+          }
+
+          public void scp(String from, String to, IProgressMonitor monitor) {
+            File file = new File(from);
+            ChannelExec channel = null;
+
+            monitor.beginTask("scp from " + from + " to " + to, 100 * (file
+                .isDirectory() ? file.list().length + 1 : 1));
+
+            if (monitor.isCanceled()) {
+              return;
+            }
+
+            if (file.isDirectory()) {
+              // mkdir
+              try {
+                channel = (ChannelExec) getSession().openChannel("exec");
+                channel.setCommand(" mkdir " + to);
+                InputStream in = channel.getInputStream();
+                channel.connect();
+                // in.read(); // wait for a response, which
+                // we'll then ignore
+              } catch (JSchException e) {
+                // BUG(jz) abort operation and display error
+                throw new RuntimeException(e);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              } finally {
+                if ((channel != null) && channel.isConnected()) {
+                  channel.disconnect();
+                }
+              }
+
+              monitor.worked(100);
+
+              String[] children = file.list();
+              for (int i = 0; i < children.length; i++) {
+                // recurse into each directory entry
+                scp(new File(file, children[i]).getAbsolutePath(), to + "/"
+                    + children[i], new SubProgressMonitor(monitor, 100));
+              }
+            } else {
+              InputStream filein = null;
+
+              try {
+                channel = (ChannelExec) getSession().openChannel("exec");
+                (channel).setCommand("scp -p -t " + to);
+                BufferedOutputStream out =
+                    new BufferedOutputStream(channel.getOutputStream());
+                InputStream in = channel.getInputStream();
+                channel.connect();
+
+                if (in.read() == 0) {
+                  // progress units to report per 1 KB chunk (at least 1)
+                  int step = (int) Math.max(1,
+                      (100 * 1024) / Math.max(1, new File(from).length()));
+                  out.write(("C0644 " + new File(from).length() + " "
+                      + new File(to).getName() + "\n").getBytes());
+                  out.flush();
+                  if (in.read() != 0) {
+                    throw new RuntimeException("Copy failed");
+                  }
+
+                  filein =
+                      new BufferedInputStream(new FileInputStream(from));
+
+                  byte[] buffer = new byte[1024];
+                  int bytes;
+                  while ((bytes = filein.read(buffer)) > -1) {
+                    if (monitor.isCanceled()) {
+                      return;
+                    }
+
+                    out.write(buffer, 0, bytes);
+                    monitor.worked(step);
+                  }
+
+                  out.write("\0".getBytes());
+                  out.flush();
+
+                  if (in.read() != 0) {
+                    throw new RuntimeException("Copy failed");
+                  }
+                  out.close();
+                } else {
+                  // problems with copy
+                  throw new RuntimeException("Copy failed");
+                }
+              } catch (JSchException e) {
+                e.printStackTrace();
+                throw new RuntimeException(e);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              } finally {
+                if ((channel != null) && channel.isConnected()) {
+                  channel.disconnect();
+                }
+                if (filein != null) {
+                  try {
+                    filein.close();
+                  } catch (IOException e) {
+                    // ignore failure to close the local file
+                  }
+                }
+              }
+            }
+
+            monitor.done();
+          }
+        });
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.lang.reflect.InvocationTargetException;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.eclipse.core.runtime.IAdaptable;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.jface.viewers.StructuredViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.widgets.Display;
+
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+
+/**
+ * Path handling for the DFS
+ */
+public class DfsPath implements IAdaptable {
+
+  private Session shell;
+
+  private HadoopServer location;
+
+  private String path;
+
+  private final Viewer viewer;
+
+  private DfsPath parent;
+
+  static Logger log = Logger.getLogger(DfsPath.class.getName());
+
+  public DfsPath(HadoopServer location, String path, Viewer viewer) {
+    this.location = location;
+    this.path = path;
+    this.viewer = viewer;
+  }
+
+  protected String getPath() {
+    return this.path;
+  }
+
+  protected ChannelExec exec(String command) throws JSchException {
+    ChannelExec channel = (ChannelExec) getSession().openChannel("exec");
+    channel.setCommand(location.getInstallPath() + "/bin/hadoop " + command);
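+    // e.g. "<install path>/bin/hadoop  dfs  -ls /some/path" (path for
+    // illustration only)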
+    channel.setErrStream(System.err);
+    // Note: callers connect() the channel themselves when ready to run it.
+
+    return channel;
+  }
+
+  protected DfsPath(HadoopServer location, String path, Session shell,
+      Viewer viewer) {
+    this(location, path, viewer);
+
+    this.shell = shell;
+  }
+
+  protected DfsPath(DfsPath parent, String path) {
+    this(parent.location, path, parent.shell, parent.viewer);
+    this.parent = parent;
+  }
+
+  protected Session getSession() throws JSchException {
+    if (shell == null) {
+      // this.shell =
+      // JSchUtilities.createJSch().getSession(location.getUser(),
+      // location.getHostname());
+      this.shell = location.createSession();
+    }
+
+    if (!shell.isConnected()) {
+      shell.connect();
+    }
+
+    return shell;
+  }
+
+  protected void dispose() {
+    if ((this.shell != null) && this.shell.isConnected()) {
+      shell.disconnect();
+    }
+  }
+
+  @Override
+  public String toString() {
+    if ((path.length() < 1) || path.equals("/")) {
+      return "DFS @ " + location.getName();
+    } else {
+      String[] parts = path.split("/");
+      return parts[parts.length - 1];
+    }
+  }
+
+  protected void doExec(final String command) {
+    org.eclipse.core.runtime.jobs.Job job =
+        new org.eclipse.core.runtime.jobs.Job("DFS operation: " + command) {
+          @Override
+          protected IStatus run(IProgressMonitor monitor) {
+            ChannelExec exec = null;
+            monitor.beginTask("Execute remote dfs  command", 100);
+            try {
+              exec = exec(" " + command);
+              monitor.worked(33);
+
+              exec.connect();
+              monitor.worked(33);
+
+              BufferedReader reader =
+                  new BufferedReader(new InputStreamReader(
+                      new BufferedInputStream(exec.getInputStream())));
+              reader.readLine(); // wait for the command's first output; TIDY(jz)
+              monitor.worked(34);
+
+              monitor.done();
+
+              refresh();
+
+              return Status.OK_STATUS;
+            } catch (Exception e) {
+              e.printStackTrace();
+              return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
+                  "DFS operation failed: " + e.getLocalizedMessage(), e);
+            } finally {
+              if (exec != null) {
+                exec.disconnect();
+              }
+            }
+          }
+        };
+
+    job.setUser(true);
+    job.schedule();
+  }
+
+  public void delete() throws JSchException {
+    doExec("dfs " + DfsFolder.s_whichFS + " -rm " + path);
+  }
+
+  public Object getParent() {
+    return parent;
+  }
+
+  public void refresh() {
+    if (parent != null) {
+      parent.doRefresh();
+    } else {
+      doRefresh();
+    }
+  }
+
+  protected void doRefresh() {
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        ((StructuredViewer) viewer).refresh(DfsPath.this);
+      }
+    });
+  }
+
+  public Object getAdapter(Class type) {
+    log.fine(type.toString());
+    return null;
+  }
+
+  /**
+   * Copy the DfsPath to the given local directory
+   * 
+   * @param directory the local directory
+   */
+  public void downloadToLocalDirectory(String directory)
+      throws InvocationTargetException, JSchException, InterruptedException,
+      IOException {
+
+    // Not implemented here; by default, do nothing
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.actions.RunOnHadoopActionDelegate;
+import org.eclipse.core.resources.IResource;
+import org.eclipse.debug.ui.ILaunchShortcut;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.viewers.StructuredSelection;
+import org.eclipse.ui.IEditorPart;
+import org.eclipse.ui.actions.ActionDelegate;
+
+
+/**
+ * Add a shortcut "Run on Hadoop" to the Run menu
+ */
+
+public class LaunchShortcut implements ILaunchShortcut {
+  static Logger log = Logger.getLogger(LaunchShortcut.class.getName());
+
+  private ActionDelegate delegate = new RunOnHadoopActionDelegate();
+
+  public LaunchShortcut() {
+  }
+
+  public void launch(final ISelection selection, String mode) {
+    if (selection instanceof IStructuredSelection) {
+      delegate.selectionChanged(null, selection);
+      delegate.run(null);
+    }
+  }
+
+  public void launch(final IEditorPart editor, String mode) {
+    delegate.selectionChanged(null, new StructuredSelection(editor
+        .getEditorInput().getAdapter(IResource.class))); // hmm(jz)
+    // :-)
+    delegate.run(null);
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LocalMapReduceLaunchTabGroup.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.debug.ui.AbstractLaunchConfigurationTab;
+import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup;
+import org.eclipse.debug.ui.CommonTab;
+import org.eclipse.debug.ui.ILaunchConfigurationDialog;
+import org.eclipse.debug.ui.ILaunchConfigurationTab;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * 
+ * Handler for Local MapReduce job launches
+ * 
+ * TODO(jz) this may not be needed, as we almost always deploy to a remote
+ * server rather than locally; where we do run locally, we may be able to
+ * exec scripts directly without going through Java
+ * 
+ */
+public class LocalMapReduceLaunchTabGroup extends
+    AbstractLaunchConfigurationTabGroup {
+
+  public LocalMapReduceLaunchTabGroup() {
+    // TODO Auto-generated constructor stub
+  }
+
+  public void createTabs(ILaunchConfigurationDialog dialog, String mode) {
+    setTabs(new ILaunchConfigurationTab[] { new MapReduceLaunchTab(),
+        new JavaArgumentsTab(), new JavaJRETab(), new JavaClasspathTab(),
+        new CommonTab() });
+  }
+
+  public static class MapReduceLaunchTab extends AbstractLaunchConfigurationTab {
+    private Text combinerClass;
+
+    private Text reducerClass;
+
+    private Text mapperClass;
+
+    @Override
+    public boolean canSave() {
+      return true;
+    }
+
+    @Override
+    public boolean isValid(ILaunchConfiguration launchConfig) {
+      // TODO: validate that the entered classes are of the proper types
+      return true;
+    }
+
+    public void createControl(final Composite parent) {
+      Composite panel = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout(3, false);
+      panel.setLayout(layout);
+
+      Label mapperLabel = new Label(panel, SWT.NONE);
+      mapperLabel.setText("Mapper");
+      mapperClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, mapperClass);
+
+      Label reducerLabel = new Label(panel, SWT.NONE);
+      reducerLabel.setText("Reducer");
+      reducerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, reducerClass);
+
+      Label combinerLabel = new Label(panel, SWT.NONE);
+      combinerLabel.setText("Combiner");
+      combinerClass = new Text(panel, SWT.SINGLE | SWT.BORDER);
+      createRow(parent, panel, combinerClass);
+
+      panel.pack();
+      setControl(panel);
+    }
+
+    private void createRow(final Composite parent, Composite panel,
+        final Text text) {
+      text.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+      Button button = new Button(panel, SWT.BORDER);
+      button.setText("Browse...");
+      button.addListener(SWT.Selection, new Listener() {
+        public void handleEvent(Event arg0) {
+          try {
+            SelectionDialog dialog = JavaUI.createTypeDialog(parent.getShell(),
+                new ProgressMonitorDialog(parent.getShell()), SearchEngine
+                    .createWorkspaceScope(),
+                IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+            dialog.setMessage("Select Mapper type (implementing )");
+            dialog.setBlockOnOpen(true);
+            dialog.setTitle("Select Mapper Type");
+            dialog.open();
+
+            if ((dialog.getReturnCode() == Window.OK)
+                && (dialog.getResult().length > 0)) {
+              IType type = (IType) dialog.getResult()[0];
+              text.setText(type.getFullyQualifiedName());
+              setDirty(true);
+            }
+          } catch (JavaModelException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+          }
+        }
+      });
+    }
+
+    public String getName() {
+      return "Hadoop";
+    }
+
+    public void initializeFrom(ILaunchConfiguration configuration) {
+      try {
+        mapperClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.mapper", ""));
+        reducerClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.reducer", ""));
+        combinerClass.setText(configuration.getAttribute(
+            "org.apache.hadoop.eclipse.launch.combiner", ""));
+      } catch (CoreException e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+        setErrorMessage(e.getMessage());
+      }
+    }
+
+    public void performApply(ILaunchConfigurationWorkingCopy configuration) {
+      configuration.setAttribute("org.apache.hadoop.eclipse.launch.mapper",
+          mapperClass.getText());
+      configuration.setAttribute(
+          "org.apache.hadoop.eclipse.launch.reducer", reducerClass
+              .getText());
+      configuration.setAttribute(
+          "org.apache.hadoop.eclipse.launch.combiner", combinerClass
+              .getText());
+    }
+
+    public void setDefaults(ILaunchConfigurationWorkingCopy configuration) {
+
+    }
+  }
+}

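The attribute keys written by performApply() above are meant to be read back when the configuration is launched. A hypothetical reader, using the standard ILaunchConfiguration API (the surrounding launch delegate is assumed, not part of this commit):

    // Sketch: retrieving the stored Mapper class name at launch time
    String mapper = configuration.getAttribute(
        "org.apache.hadoop.eclipse.launch.mapper", "");
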
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/MutexRule.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.core.runtime.jobs.ISchedulingRule;
+
+public class MutexRule implements ISchedulingRule {
+  private final String id;
+
+  public MutexRule(String id) {
+    this.id = id;
+  }
+
+  public boolean contains(ISchedulingRule rule) {
+    return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
+  }
+
+  public boolean isConflicting(ISchedulingRule rule) {
+    return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id);
+  }
+}

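Jobs that share an equal MutexRule id conflict with each other, so the Eclipse job manager runs them one at a time. A usage sketch (the job body and the id scheme are assumptions, not part of this commit):

    // Sketch: serializing two jobs against the same remote server
    Job job = new Job("DFS operation") {
      @Override
      protected IStatus run(IProgressMonitor monitor) {
        // ... perform the remote operation ...
        return Status.OK_STATUS;
      }
    };
    job.setRule(new MutexRule("dfs@example.com")); // equal ids => mutual exclusion
    job.schedule();
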
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/SWTUserInfo.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/SWTUserInfo.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/SWTUserInfo.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/SWTUserInfo.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.jface.dialogs.Dialog;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Control;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Shell;
+import org.eclipse.swt.widgets.Text;
+
+import com.jcraft.jsch.UIKeyboardInteractive;
+import com.jcraft.jsch.UserInfo;
+
+/**
+ * Data structure for retaining user login information
+ */
+public abstract class SWTUserInfo implements UserInfo, UIKeyboardInteractive {
+
+  public SWTUserInfo() {
+  }
+
+  public String getPassphrase() {
+    return this.getPassword();
+  }
+
+  public abstract String getPassword();
+
+  public abstract void setPassword(String pass);
+
+  public void setPassphrase(String pass) {
+    this.setPassword(pass);
+  }
+
+  public boolean promptPassphrase(final String arg0) {
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        SWTUserInfo.this.setPassword(new PasswordDialog(null).prompt(arg0));
+      }
+    });
+
+    return this.getPassword() != null;
+  }
+
+  public boolean promptPassword(final String arg0) {
+    // Only prompt if no password is set yet, so a second session does not
+    // query the user again. -- eyhung
+    // TODO: how do we recover from a cached bad password?
+    if (this.getPassword() == null) {
+      Display.getDefault().syncExec(new Runnable() {
+        public void run() {
+          Shell parent = Display.getDefault().getActiveShell();
+          String password = new PasswordDialog(parent).prompt(arg0);
+          SWTUserInfo.this.setPassword(password);
+        }
+      });
+    }
+    return this.getPassword() != null;
+  }
+
+  private boolean result;
+
+  public boolean promptYesNo(final String arg0) {
+
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        result =
+            MessageDialog.openQuestion(
+                Display.getDefault().getActiveShell(),
+                "SSH Question Dialog", arg0);
+      }
+    });
+
+    return result;
+  }
+
+  public void showMessage(final String arg0) {
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        MessageDialog.openInformation(null, "SSH Message", arg0);
+      }
+    });
+  }
+
+  private String[] interactiveAnswers;
+
+  /** {@inheritDoc} */
+  public String[] promptKeyboardInteractive(final String destination,
+      final String name, final String instruction, final String[] prompt,
+      final boolean[] echo) {
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        Shell parent = Display.getDefault().getActiveShell();
+        interactiveAnswers =
+            new KeyboardInteractiveDialog(parent).prompt(destination, name,
+                instruction, prompt, echo);
+      }
+    });
+    return interactiveAnswers;
+  }
+
+  /**
+   * Simple password prompting dialog
+   */
+  public static class PasswordDialog extends Dialog {
+    private Text text;
+
+    private String password;
+
+    private Label title;
+
+    private String message;
+
+    protected PasswordDialog(Shell parentShell) {
+      super(parentShell);
+    }
+
+    public String prompt(String message) {
+      this.setBlockOnOpen(true);
+      this.message = message;
+
+      if (this.open() == OK) {
+        return password;
+      } else {
+        return null;
+      }
+    }
+
+    @Override
+    protected void okPressed() {
+      this.password = text.getText();
+      super.okPressed();
+    }
+
+    @Override
+    protected Control createDialogArea(Composite parent) {
+      Composite panel = (Composite) super.createDialogArea(parent);
+      panel.setLayout(new GridLayout(2, false));
+      panel.setLayoutData(new GridData(GridData.FILL_BOTH));
+
+      title = new Label(panel, SWT.NONE);
+      GridData span2 = new GridData(GridData.FILL_HORIZONTAL);
+      span2.horizontalSpan = 2;
+      title.setLayoutData(span2);
+      title.setText(message);
+
+      getShell().setText(message);
+
+      Label label = new Label(panel, SWT.NONE);
+      label.setText("password");
+
+      text = new Text(panel, SWT.BORDER | SWT.SINGLE);
+      GridData data = new GridData(GridData.FILL_HORIZONTAL);
+      data.grabExcessHorizontalSpace = true;
+      text.setLayoutData(data);
+      text.setEchoChar('*');
+
+      return panel;
+    }
+  }
+
+  /**
+   * Keyboard interactive prompting dialog
+   */
+  public static class KeyboardInteractiveDialog extends Dialog {
+
+    private String destination;
+
+    private String name;
+
+    private String instruction;
+
+    private String[] prompt;
+
+    private boolean[] echo;
+
+    private Text[] text;
+
+    private String[] answer;
+
+    protected KeyboardInteractiveDialog(Shell parentShell) {
+      super(parentShell);
+    }
+
+    public String[] prompt(String destination, String name,
+        String instruction, String[] prompt, boolean[] echo) {
+
+      this.destination = destination;
+      this.name = name;
+      this.instruction = instruction;
+      this.prompt = prompt;
+      this.echo = echo;
+
+      this.setBlockOnOpen(true);
+
+      if (this.open() == OK)
+        return answer;
+      else
+        return null;
+    }
+
+    @Override
+    protected void okPressed() {
+      answer = new String[text.length];
+      for (int i = 0; i < text.length; ++i) {
+        answer[i] = text[i].getText();
+      }
+      super.okPressed();
+    }
+
+    @Override
+    protected Control createDialogArea(Composite parent) {
+      Composite panel = (Composite) super.createDialogArea(parent);
+      panel.setLayout(new GridLayout(2, false));
+      panel.setLayoutData(new GridData(GridData.FILL_BOTH));
+
+      Label title = new Label(panel, SWT.NONE);
+      GridData span2 = new GridData(GridData.FILL_HORIZONTAL);
+      span2.horizontalSpan = 2;
+      title.setLayoutData(span2);
+      title.setText(destination + ": " + name);
+
+      getShell().setText(instruction);
+
+      text = new Text[prompt.length];
+
+      for (int i = 0; i < text.length; ++i) {
+        Label label = new Label(panel, SWT.NONE);
+        label.setText("password");
+
+        text[i] = new Text(panel, SWT.BORDER | SWT.SINGLE);
+        GridData data = new GridData(GridData.FILL_HORIZONTAL);
+        data.grabExcessHorizontalSpace = true;
+        text[i].setLayoutData(data);
+        if (!echo[i])
+          text[i].setEchoChar('*');
+      }
+
+      return panel;
+    }
+  }
+
+}

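Because SWTUserInfo leaves only the password accessor pair abstract, a caller can attach it to a JSch session with a small anonymous subclass. A sketch (the jsch, user, and host variables are assumed to be in scope; not part of this commit):

    // Sketch: routing JSch authentication prompts through the SWT dialogs above
    SWTUserInfo info = new SWTUserInfo() {
      private String password;
      @Override
      public String getPassword() { return password; }
      @Override
      public void setPassword(String pass) { this.password = pass; }
    };
    Session session = jsch.getSession(user, host);
    session.setUserInfo(info); // JSch now calls promptPassword() etc. as needed
    session.connect();
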
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup;
+import org.eclipse.debug.ui.CommonTab;
+import org.eclipse.debug.ui.ILaunchConfigurationDialog;
+import org.eclipse.debug.ui.ILaunchConfigurationTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab;
+
+/**
+ * Create the tab group for the dialog window for starting a Hadoop job.
+ */
+
+public class StartHadoopLaunchTabGroup extends
+    AbstractLaunchConfigurationTabGroup {
+
+  public StartHadoopLaunchTabGroup() {
+    // TODO Auto-generated constructor stub
+  }
+
+  /**
+   * TODO(jz) consider the appropriate tabs for this case
+   */
+  public void createTabs(ILaunchConfigurationDialog dialog, String mode) {
+    setTabs(new ILaunchConfigurationTab[] { new JavaArgumentsTab(),
+        new JavaJRETab(), new JavaClasspathTab(), new CommonTab() });
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartMapReduceServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartMapReduceServer.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartMapReduceServer.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartMapReduceServer.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,373 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.launch;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.rmi.dgc.VMID;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.server.JarModule;
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.SubProgressMonitor;
+import org.eclipse.debug.core.ILaunch;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.model.ILaunchConfigurationDelegate;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.graphics.Color;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.ui.console.ConsolePlugin;
+import org.eclipse.ui.console.IConsole;
+import org.eclipse.ui.console.IOConsoleOutputStream;
+import org.eclipse.ui.console.MessageConsole;
+import org.eclipse.ui.console.MessageConsoleStream;
+
+import com.jcraft.jsch.Channel;
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+
+/**
+ * Transfer a jar file and run it on the specified MapReduce server.
+ */
+
+public class StartMapReduceServer implements ILaunchConfigurationDelegate {
+
+  private static final Logger log = Logger.getLogger(StartMapReduceServer.class
+      .getName());
+
+  private static final int SSH_FAILED_CODE = 999;
+
+  private static final IStatus SSH_FAILED_STATUS1 = new Status(IStatus.ERROR,
+      Activator.PLUGIN_ID, SSH_FAILED_CODE,
+      "SSH Connection to hadoop server failed", null);
+
+  private static final IStatus SSH_FAILED_STATUS2 = new Status(IStatus.ERROR,
+      Activator.PLUGIN_ID, SSH_FAILED_CODE,
+      "SSH Connection to start SCP failed", null);
+
+  private static final IStatus SSH_FAILED_STATUS3 = new Status(IStatus.ERROR,
+      Activator.PLUGIN_ID, SSH_FAILED_CODE,
+      "SCP Connection to hadoop server failed", null);
+
+  private static final int TIMEOUT = 15000;
+
+  private Color black;
+
+  private Color red;
+
+  public StartMapReduceServer() {
+    Display.getDefault().syncExec(new Runnable() {
+      public void run() {
+        black = Display.getDefault().getSystemColor(SWT.COLOR_BLACK);
+        red = Display.getDefault().getSystemColor(SWT.COLOR_RED);
+      }
+    });
+  }
+
+  static int checkAck(InputStream in) throws IOException {
+    int b = in.read();
+    // b may be 0 for success,
+    // 1 for error,
+    // 2 for fatal error,
+    // -1 for end of stream
+    if (b == 0) {
+      return b;
+    }
+    if (b == -1) {
+      log.info("checkAck returned -1");
+      return b;
+    }
+
+    if ((b == 1) || (b == 2)) {
+      StringBuffer sb = new StringBuffer();
+      int c;
+      do {
+        c = in.read();
+        sb.append((char) c);
+      } while (c != '\n');
+
+      // print the server's message for both error (1) and fatal error (2)
+      System.out.print(sb.toString());
+    }
+    return b;
+  }
+
+  /**
+   * Send the file and launch the hadoop job.
+   */
+  public void launch(ILaunchConfiguration configuration, String mode,
+      ILaunch launch, IProgressMonitor monitor) throws CoreException {
+    Map attributes = configuration.getAttributes();
+
+    log.log(Level.FINE, "Preparing hadoop launch", configuration);
+
+    String hostname = configuration.getAttribute("hadoop.host", "");
+    int serverid = configuration.getAttribute("hadoop.serverid", 0);
+    String user = configuration.getAttribute("hadoop.user", "");
+    String path = configuration.getAttribute("hadoop.path", "");
+
+    String dir = ensureTrailingSlash(path);
+
+    log.log(Level.FINER, "Computed Server URL", new Object[] { dir, user,
+        hostname });
+
+    HadoopServer server = ServerRegistry.getInstance().getServer(serverid);
+
+    try {
+      Session session = server.createSession();
+      // session.setTimeout(TIMEOUT);
+
+      log.log(Level.FINER, "Connected");
+
+      /*
+       * COMMENTED(jz) removing server start/stop support for now if (!
+       * attributes.containsKey("hadoop.jar")) { // start or stop server if(
+       * server.getServerState() == IServer.STATE_STARTING ) { String command =
+       * dir + "bin/start-all.sh"; execInConsole(session, command); } else if(
+       * server.getServerState() == IServer.STATE_STOPPING ) { String command =
+       * dir + "bin/stop-all.sh"; execInConsole(session, command); } }
+       */
+
+      // transfer the job jar to the server and run it
+      {
+        FileInputStream fis = null;
+        String jarFile, remoteFile = null;
+
+        if (attributes.containsKey("hadoop.jar")) {
+          jarFile = (String) attributes.get("hadoop.jar");
+        } else {
+          String memento = (String) attributes.get("hadoop.jarrable");
+          JarModule fromMemento = JarModule.fromMemento(memento);
+          jarFile = fromMemento.buildJar(new SubProgressMonitor(monitor, 100))
+              .toString();
+        }
+
+        if (jarFile.lastIndexOf('/') > 0) {
+          remoteFile = jarFile.substring(jarFile.lastIndexOf('/') + 1);
+        } else if (jarFile.lastIndexOf('\\') > 0) {
+          remoteFile = jarFile.substring(jarFile.lastIndexOf('\\') + 1);
+        } else {
+          // no path separator; the jar name is already a bare file name
+          remoteFile = jarFile;
+        }
+
+        // remotely exec 'scp -p -t <remoteFile>' to receive the jar
+
+        String command = "scp -p -t " + remoteFile;
+        Channel channel = session.openChannel("exec");
+        ((ChannelExec) channel).setCommand(command);
+
+        // get I/O streams for remote scp
+        OutputStream out = channel.getOutputStream();
+        final InputStream in = channel.getInputStream();
+
+        channel.connect();
+
+        if (checkAck(in) != 0) {
+          throw new CoreException(SSH_FAILED_STATUS1);
+        }
+
+        // send "C0644 filesize filename", where filename should not
+        // include '/'
+        long filesize = (new File(jarFile)).length();
+        command = "C0644 " + filesize + " ";
+        if (jarFile.lastIndexOf('/') > 0) {
+          command += jarFile.substring(jarFile.lastIndexOf('/') + 1);
+        } else {
+          command += jarFile;
+        }
+
+        command += "\n";
+        out.write(command.getBytes());
+        out.flush();
+        if (checkAck(in) != 0) {
+          throw new CoreException(SSH_FAILED_STATUS2);
+        }
+
+        // send the contents of the jar file
+        fis = new FileInputStream(jarFile);
+        byte[] buf = new byte[1024];
+        while (true) {
+          int len = fis.read(buf, 0, buf.length);
+          if (len <= 0) {
+            break;
+          }
+          out.write(buf, 0, len); // out.flush();
+        }
+
+        fis.close();
+        fis = null;
+        // send '\0'
+        buf[0] = 0;
+        out.write(buf, 0, 1);
+        out.flush();
+        if (checkAck(in) != 0) {
+          throw new CoreException(SSH_FAILED_STATUS3);
+        }
+        out.close();
+        channel.disconnect();
+
+        // move the jar file to a temp directory
+        String jarDir = "/tmp/hadoopjar"
+            + new VMID().toString().replace(':', '_');
+        command = "mkdir " + jarDir + ";mv " + remoteFile + " " + jarDir;
+        channel = session.openChannel("exec");
+        ((ChannelExec) channel).setCommand(command);
+        channel.connect();
+        channel.disconnect();
+
+        session.disconnect();
+
+        // create a new session with a zero timeout to prevent the
+        // console stream from stalling -- eyhung
+        final Session session2 = server.createSessionNoTimeout();
+
+        // now remotely execute hadoop with the just sent-over jarfile
+        command = dir + "bin/hadoop jar " + jarDir + "/" + remoteFile;
+        log.fine("Running command: " + command);
+        execInConsole(session2, command, jarDir + "/" + remoteFile);
+
+        // the jar file is not deleted anymore, but placed in a temp dir
+        // -- eyhung
+      }
+    } catch (final JSchException e) {
+      e.printStackTrace();
+      Display.getDefault().syncExec(new Runnable() {
+        public void run() {
+          MessageDialog.openError(Display.getDefault().getActiveShell(),
+              "Problems connecting to MapReduce Server", 
+              e.getLocalizedMessage());
+        }
+      });
+    } catch (IOException e) {
+      log.log(Level.WARNING, "Could not transfer or run the job jar", e);
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Show the job output in the console.
+   * @param session The SSH session object
+   * @param command The command to run remotely
+   * @param jarFile The remote path of the job's jar file; its temporary
+   *          directory is removed once the job output ends
+   * @throws JSchException
+   */
+  private void execInConsole(final Session session, final String command,
+      final String jarFile) throws JSchException {
+    final ChannelExec channel = (ChannelExec) session.openChannel("exec");
+
+    final MessageConsole console = new MessageConsole("Hadoop: " + command,
+        null);
+
+    final IOConsoleOutputStream out = console.newOutputStream();
+    final IOConsoleOutputStream err = console.newOutputStream();
+
+    out.setColor(black);
+    err.setColor(red);
+
+    ConsolePlugin.getDefault().getConsoleManager().addConsoles(
+        new IConsole[] { console });
+    ConsolePlugin.getDefault().getConsoleManager().showConsoleView(console);
+
+    channel.setCommand(command);
+    channel.setInputStream(null);
+
+    channel.connect();
+    new Thread() {
+      @Override
+      public void run() {
+        try {
+
+          BufferedReader hadoopOutput = new BufferedReader(
+              new InputStreamReader(channel.getInputStream()));
+
+          String stdoutLine;
+          while ((stdoutLine = hadoopOutput.readLine()) != null) {
+            out.write(stdoutLine);
+            out.write('\n');
+          }
+
+          channel.disconnect();
+
+          // meaningless call meant to prevent console from being
+          // garbage collected -- eyhung
+          console.getName();
+          ChannelExec channel2 = (ChannelExec) session.openChannel("exec");
+          channel2.setCommand("rm -rf "
+              + jarFile.substring(0, jarFile.lastIndexOf("/")));
+          log.fine("Removing temp file "
+              + jarFile.substring(0, jarFile.lastIndexOf("/")));
+          channel2.connect();
+          channel2.disconnect();
+
+        } catch (Exception e) {
+          log.log(Level.WARNING, "Error while streaming job output", e);
+        }
+      }
+    }.start();
+
+    new Thread() {
+      @Override
+      public void run() {
+        try {
+
+          BufferedReader hadoopErr = new BufferedReader(new InputStreamReader(
+              channel.getErrStream()));
+
+          String stderrLine;
+          while ((stderrLine = hadoopErr.readLine()) != null) {
+            err.write(stderrLine);
+            err.write('\n');
+          }
+
+        } catch (Exception e) {
+          log.log(Level.WARNING, "Error while streaming job error output", e);
+        }
+      }
+    }.start();
+
+  }
+
+  private String ensureTrailingSlash(String dir) {
+    if (!dir.endsWith("/")) {
+      dir += "/";
+    }
+    return dir;
+  }
+
+}
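
The jar upload above speaks the raw single-file SCP protocol by hand. Boiled
down, the client-side handshake is the sequence sketched below. This is a
minimal, self-contained sketch, not the plug-in's code: the class and method
names are illustrative, and the stream pair is assumed to come from an
already-connected 'scp -p -t <file>' channel (as with JSch's ChannelExec).

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class ScpUploadSketch {
      /** Sends one file: "C0644 <size> <name>\n", the raw bytes, then '\0'. */
      static void sendFile(File file, OutputStream out, InputStream in)
          throws IOException {
        expectAck(in); // remote 'scp -t' announces readiness with a 0 byte
        out.write(("C0644 " + file.length() + " " + file.getName() + "\n")
            .getBytes());
        out.flush();
        expectAck(in); // header accepted
        FileInputStream fis = new FileInputStream(file);
        try {
          byte[] buf = new byte[1024];
          int len;
          while ((len = fis.read(buf)) > 0) {
            out.write(buf, 0, len);
          }
        } finally {
          fis.close();
        }
        out.write(0); // terminating '\0' after the file body
        out.flush();
        expectAck(in); // file accepted
      }

      private static void expectAck(InputStream in) throws IOException {
        int b = in.read(); // 0 = success, 1 = error, 2 = fatal, -1 = EOF
        if (b != 0) {
          throw new IOException("scp acknowledgement failed: " + b);
        }
      }
    }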

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse.preferences;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.eclipse.jface.preference.DirectoryFieldEditor;
+import org.eclipse.jface.preference.FieldEditorPreferencePage;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.IWorkbenchPreferencePage;
+
+/**
+ * This class represents a preference page that is contributed to the
+ * Preferences dialog. By subclassing <samp>FieldEditorPreferencePage</samp>,
+ * we can use the field support built into JFace that allows us to create a
+ * page that is small and knows how to save, restore and apply itself.
+ * <p>
+ * This page is used to modify preferences only. They are stored in the
+ * preference store that belongs to the main plug-in class. That way,
+ * preferences can be accessed directly via the preference store.
+ */
+
+public class HadoopHomeDirPreferencePage extends FieldEditorPreferencePage
+    implements IWorkbenchPreferencePage {
+
+  public HadoopHomeDirPreferencePage() {
+    super(GRID);
+    setPreferenceStore(Activator.getDefault().getPreferenceStore());
+    setTitle("MapReduce Tools");
+    setDescription("MapReduce Preferences");
+  }
+
+  /**
+   * Creates the field editors. Field editors are abstractions of the common
+   * GUI blocks needed to manipulate various types of preferences. Each field
+   * editor knows how to save and restore itself.
+   */
+  @Override
+  public void createFieldEditors() {
+    addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
+        "&Hadoop main directory:", getFieldEditorParent()));
+
+  }
+
+  /** {@inheritDoc} */
+  public void init(IWorkbench workbench) {
+  }
+
+}
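
Because the page stores its value in the plug-in's preference store, other
parts of the plug-in can read the configured directory directly. A small
sketch (the helper class and method names are illustrative only; the store
API is the one the page itself uses):

    import org.apache.hadoop.eclipse.Activator;
    import org.apache.hadoop.eclipse.preferences.PreferenceConstants;

    public class HadoopHomeLookup {
      /** Returns the configured Hadoop directory, or "" if never set. */
      public static String getHadoopHome() {
        return Activator.getDefault().getPreferenceStore()
            .getString(PreferenceConstants.P_PATH);
      }
    }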

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.preferences;
+
+/**
+ * Constant definitions for plug-in preferences
+ */
+public class PreferenceConstants {
+
+  public static final String P_PATH = "pathPreference";
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceInitializer.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.preferences;
+
+import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
+
+/**
+ * Class used to initialize default preference values.
+ */
+public class PreferenceInitializer extends AbstractPreferenceInitializer {
+
+  /** {@inheritDoc} */
+  @Override
+  public void initializeDefaultPreferences() {
+  }
+
+}
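
Since initializeDefaultPreferences() is left empty above, P_PATH defaults to
the empty string. If a default were wanted, the body would presumably look
like the following sketch; the HADOOP_HOME environment-variable fallback is
an assumption for illustration, not something this commit does:

    package org.apache.hadoop.eclipse.preferences;

    import org.apache.hadoop.eclipse.Activator;
    import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
    import org.eclipse.jface.preference.IPreferenceStore;

    public class PreferenceInitializerSketch extends
        AbstractPreferenceInitializer {
      @Override
      public void initializeDefaultPreferences() {
        IPreferenceStore store = Activator.getDefault().getPreferenceStore();
        // assumption: fall back to the HADOOP_HOME environment variable
        String home = System.getenv("HADOOP_HOME");
        store.setDefault(PreferenceConstants.P_PATH, home == null ? "" : home);
      }
    }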

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopJob.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+/**
+ * Helper class to pretty-print the status of a Hadoop job running on a
+ * MapReduce server.
+ */
+
+public class HadoopJob {
+  String name;
+  
+  /**
+   * Hadoop Job Id (useful to kill the job)
+   */
+  String jobId;
+
+  boolean completed;
+
+  String totalMaps;
+
+  String totalReduces;
+
+  String completedMaps;
+
+  String completedReduces;
+
+  String mapPercentage;
+
+  String reducePercentage;
+
+  private HadoopServer server;
+
+  public HadoopJob(HadoopServer server) {
+    this.server = server;
+  }
+
+  public void print() {
+    System.out.println("Job name = " + name);
+    System.out.println("Job id = " + jobId);
+    System.out.println("Job total maps = " + totalMaps);
+    System.out.println("Job completed maps = " + completedMaps);
+    System.out.println("Map percentage complete = " + mapPercentage);
+    System.out.println("Job total reduces = " + totalReduces);
+    System.out.println("Job completed reduces = " + completedReduces);
+    System.out.println("Reduce percentage complete = " + reducePercentage);
+    System.out.flush();
+  }
+
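+  /** Returns the job name, which doubles as the job's identity here. */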
+  public String getId() {
+    return this.name;
+  }
+  
+  public String getJobId() {
+    return this.jobId;
+  }
+  
+  public boolean isCompleted() {
+    return this.completed;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return (o instanceof HadoopJob) && ((HadoopJob) o).name.equals(name);
+  }
+
+  @Override
+  public int hashCode() {
+    // equals() is based on the job name, so hashCode() must be as well
+    return (name == null) ? 0 : name.hashCode();
+  }
+
+  public String getState() {
+    return (!completed) ? "Running" : "Completed";
+  }
+
+  public String getStatus() {
+    StringBuffer s = new StringBuffer();
+
+    s.append("Maps : " + completedMaps + "/" + totalMaps);
+    s.append(" (" + mapPercentage + ")");
+    s.append("  Reduces : " + completedReduces + "/" + totalReduces);
+    s.append(" (" + reducePercentage + ")");
+
+    return s.toString();
+  }
+
+  public HadoopServer getServer() {
+    return this.server;
+  }
+}
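
A short, same-package usage sketch of the helpers above; the class name and
all field values are made up for illustration (the fields are
package-private, so this only works from org.apache.hadoop.eclipse.server):

    package org.apache.hadoop.eclipse.server;

    public class HadoopJobStatusDemo {
      public static void main(String[] args) {
        HadoopJob job = new HadoopJob(null); // no live server needed here
        job.name = "wordcount";
        job.jobId = "job_0001";
        job.totalMaps = "10";
        job.completedMaps = "3";
        job.mapPercentage = "30%";
        job.totalReduces = "2";
        job.completedReduces = "0";
        job.reducePercentage = "0%";
        System.out.println(job.getState());  // prints "Running"
        System.out.println(job.getStatus()); // "Maps : 3/10 (30%)  Reduces : 0/2 (0%)"
      }
    }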

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/HadoopPathPage.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.server;
+
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.IEditorInput;
+import org.eclipse.ui.IEditorPart;
+import org.eclipse.ui.IEditorSite;
+import org.eclipse.ui.IPropertyListener;
+import org.eclipse.ui.IWorkbenchPartSite;
+import org.eclipse.ui.PartInitException;
+
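+/**
+ * Editor part intended to display a Hadoop path. All methods are currently
+ * auto-generated, unimplemented stubs.
+ */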
+public class HadoopPathPage implements IEditorPart {
+
+  public IEditorInput getEditorInput() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public IEditorSite getEditorSite() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void init(IEditorSite site, IEditorInput input)
+      throws PartInitException {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void addPropertyListener(IPropertyListener listener) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void createPartControl(Composite parent) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void dispose() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public IWorkbenchPartSite getSite() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public String getTitle() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public Image getTitleImage() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public String getTitleToolTip() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void removePropertyListener(IPropertyListener listener) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void setFocus() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public Object getAdapter(Class adapter) {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public void doSave(IProgressMonitor monitor) {
+    // TODO Auto-generated method stub
+
+  }
+
+  public void doSaveAs() {
+    // TODO Auto-generated method stub
+
+  }
+
+  public boolean isDirty() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  public boolean isSaveAsAllowed() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  public boolean isSaveOnCloseNeeded() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+}