Posted to common-commits@hadoop.apache.org by ta...@apache.org on 2007/10/25 20:58:39 UTC

svn commit: r588310 [4/4] - in /lucene/hadoop/trunk/src/contrib/eclipse-plugin: ./ .settings/ META-INF/ resources/ resources/Components/ resources/Old/ src/java/org/apache/hadoop/eclipse/ src/java/org/apache/hadoop/eclipse/actions/ src/java/org/apache/...

Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/RunOnHadoopWizard.java Thu Oct 25 11:58:32 2007
@@ -18,15 +18,33 @@
 
 package org.apache.hadoop.eclipse.servers;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
 import org.apache.hadoop.eclipse.server.HadoopServer;
 import org.apache.hadoop.eclipse.server.JarModule;
+import org.apache.hadoop.mapred.JobConf;
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
 import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
 import org.eclipse.jface.viewers.TableViewer;
 import org.eclipse.jface.wizard.Wizard;
 import org.eclipse.jface.wizard.WizardPage;
 import org.eclipse.swt.SWT;
 import org.eclipse.swt.events.SelectionEvent;
 import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.layout.FillLayout;
 import org.eclipse.swt.layout.GridData;
 import org.eclipse.swt.layout.GridLayout;
 import org.eclipse.swt.widgets.Button;
@@ -39,120 +57,219 @@
  * Wizard for publishing a job to a Hadoop server.
  */
 
-public class RunOnHadoopWizard extends Wizard implements SelectionListener {
+public class RunOnHadoopWizard extends Wizard {
 
-  private DefineHadoopServerLocWizardPage createNewPage;
+  private MainWizardPage mainPage;
 
-  private MainPage mainPage;
+  private HadoopLocationWizard createNewPage;
 
-  private final JarModule jar;
+  /**
+   * The file resource (containing a main()) to run on the Hadoop location
+   */
+  private IFile resource;
 
-  private boolean complete = false;
+  /**
+   * The launch configuration to update
+   */
+  private ILaunchConfigurationWorkingCopy iConf;
 
   private IProgressMonitor progressMonitor;
 
-  public RunOnHadoopWizard(JarModule jar) {
-    this.jar = jar;
+  public RunOnHadoopWizard(IFile resource,
+      ILaunchConfigurationWorkingCopy iConf) {
+    this.resource = resource;
+    this.iConf = iConf;
     setForcePreviousAndNextButtons(true);
     setNeedsProgressMonitor(true);
     setWindowTitle("Run on Hadoop");
   }
 
+  /**
+   * This wizard contains 2 pages:
+   * <li> the first one lets the user choose an already existing location
+   * <li> the second one allows the user to create a new location, in case it
+   * does not already exist
+   */
+  /* @inheritDoc */
   @Override
   public void addPages() {
-    super.addPages();
-    mainPage = new MainPage();
-    addPage(mainPage);
-    createNewPage = new DefineHadoopServerLocWizardPage();
-    addPage(createNewPage);
+    addPage(this.mainPage = new MainWizardPage());
+    addPage(this.createNewPage = new HadoopLocationWizard());
   }
 
-  @Override
   /**
    * Performs any actions appropriate in response to the user having pressed
    * the Finish button, or refuse if finishing now is not permitted.
    */
+  /* @inheritDoc */
+  @Override
   public boolean performFinish() {
+
+    /*
+     * Create a new location or get an existing one
+     */
     HadoopServer location = null;
     if (mainPage.createNew.getSelection()) {
       location = createNewPage.performFinish();
+
     } else if (mainPage.table.getSelection().length == 1) {
       location = (HadoopServer) mainPage.table.getSelection()[0].getData();
     }
 
-    if (location != null) {
-      location.runJar(jar, progressMonitor);
+    if (location == null)
+      return false;
 
-      return true;
+    /*
+     * Get the base directory of the plug-in for storing configurations and
+     * JARs
+     */
+    File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+    // Package the Job into a JAR
+    File jarFile = JarModule.createJarPackage(resource);
+    if (jarFile == null) {
+      ErrorMessageDialog.display("Run on Hadoop",
+          "Unable to create or locate the JAR file for the Job");
+      return false;
     }
 
-    return false;
+    /*
+     * Generate a temporary Hadoop configuration directory and add it to the
+     * classpath of the launch configuration
+     */
+
+    File confDir;
+    try {
+      confDir = File.createTempFile("hadoop-conf-", "", baseDir);
+      confDir.delete();
+      confDir.mkdirs();
+      if (!confDir.isDirectory()) {
+        ErrorMessageDialog.display("Run on Hadoop",
+            "Cannot create temporary directory: " + confDir);
+        return false;
+      }
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+      return false;
+    }
+
+    // Prepare the Hadoop configuration
+    JobConf conf = new JobConf(location.getConfiguration());
+    conf.setJar(jarFile.getAbsolutePath());
+
+    // Write it to the disk file
+    try {
+      // File confFile = File.createTempFile("hadoop-site-", ".xml",
+      // confDir);
+      File confFile = new File(confDir, "hadoop-site.xml");
+      FileOutputStream fos = new FileOutputStream(confFile);
+      conf.write(fos);
+      fos.close();
+
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+      return false;
+    }
+
+    // Setup the Launch class path
+    List<String> classPath;
+    try {
+      classPath =
+          iConf.getAttribute(
+              IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+              new ArrayList());
+      IPath confIPath = new Path(confDir.getAbsolutePath());
+      IRuntimeClasspathEntry cpEntry =
+          JavaRuntime.newArchiveRuntimeClasspathEntry(confIPath);
+      classPath.add(0, cpEntry.getMemento());
+      iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+          classPath);
+
+    } catch (CoreException e) {
+      e.printStackTrace();
+      return false;
+    }
+
+    // location.runResource(resource, progressMonitor);
+    return true;
   }
 
-  public void refreshButtons() {
+  private void refreshButtons() {
     getContainer().updateButtons();
   }
 
-  @Override
   /**
    * Allows finish when an existing server is selected or when a new server
    * location is defined
    */
+  /* @inheritDoc */
+  @Override
   public boolean canFinish() {
-
-    if (mainPage.chooseExisting.getSelection()
-        && (mainPage.table.getSelectionCount() > 0)) {
-      return true;
-    } else {
-      return (createNewPage.isPageComplete());
-      // check first
-    }
+    if (mainPage != null)
+      return mainPage.canFinish();
+    return false;
   }
 
-  public class MainPage extends WizardPage {
+  /**
+   * This is the main page of the wizard. It allows the user either to choose
+   * an already existing location or to indicate he wants to create a new
+   * location.
+   */
+  public class MainWizardPage extends WizardPage {
 
     private Button createNew;
 
     private Table table;
 
-    public Button chooseExisting;
+    private Button chooseExisting;
 
-    public MainPage() {
+    public MainWizardPage() {
       super("Select or define server to run on");
-      setTitle("Select Hadoop Server");
-      setDescription("Select a Hadoop Server to run on.");
+      setTitle("Select Hadoop location");
+      setDescription("Select a Hadoop location to run on.");
     }
 
+    /* @inheritDoc */
     @Override
     public boolean canFlipToNextPage() {
       return createNew.getSelection();
     }
 
+    /* @inheritDoc */
     public void createControl(Composite parent) {
-      Composite control = new Composite(parent, SWT.NONE);
-      control.setLayout(new GridLayout(4, false));
+      Composite panel = new Composite(parent, SWT.NONE);
+      panel.setLayout(new GridLayout(1, false));
 
-      Label label = new Label(control, SWT.FILL);
+      // Label
+      Label label = new Label(panel, SWT.NONE);
       label.setText("Select a Hadoop Server to run on.");
-      GridData data = new GridData(GridData.FILL_BOTH);
-      data.grabExcessVerticalSpace = false;
-      data.horizontalSpan = 4;
-      label.setLayoutData(data);
+      GridData gData = new GridData(GridData.FILL_BOTH);
+      gData.grabExcessVerticalSpace = false;
+      label.setLayoutData(gData);
 
-      createNew = new Button(control, SWT.RADIO);
+      // Create location button
+      createNew = new Button(panel, SWT.RADIO);
       createNew.setText("Define a new Hadoop server location");
-      createNew.setLayoutData(data);
-      createNew.addSelectionListener(RunOnHadoopWizard.this);
+      createNew.setLayoutData(gData);
+      createNew.addSelectionListener(new SelectionListener() {
+        public void widgetDefaultSelected(SelectionEvent e) {
+        }
 
+        public void widgetSelected(SelectionEvent e) {
+          setPageComplete(true);
+          RunOnHadoopWizard.this.refreshButtons();
+        }
+      });
       createNew.setSelection(true);
 
-      chooseExisting = new Button(control, SWT.RADIO);
+      // Select existing location button
+      chooseExisting = new Button(panel, SWT.RADIO);
       chooseExisting
           .setText("Choose an existing server from the list below");
-      chooseExisting.setLayoutData(data);
-      chooseExisting.addSelectionListener(RunOnHadoopWizard.this);
-
+      chooseExisting.setLayoutData(gData);
       chooseExisting.addSelectionListener(new SelectionListener() {
+        public void widgetDefaultSelected(SelectionEvent e) {
+        }
 
         public void widgetSelected(SelectionEvent e) {
           if (chooseExisting.getSelection()
@@ -161,50 +278,44 @@
               table.setSelection(0);
             }
           }
+          RunOnHadoopWizard.this.refreshButtons();
         }
-
-        public void widgetDefaultSelected(SelectionEvent e) {
-        }
-
       });
 
-      Composite serverList = new Composite(control, SWT.NONE);
-      GridData span = new GridData(GridData.FILL_BOTH);
-      span.horizontalSpan = 4;
-      serverList.setLayoutData(span);
-      GridLayout layout = new GridLayout(4, false);
-      layout.marginTop = 12;
-      serverList.setLayout(layout);
+      // Table of existing locations
+      Composite serverListPanel = new Composite(panel, SWT.FILL);
+      gData = new GridData(GridData.FILL_BOTH);
+      gData.horizontalSpan = 1;
+      serverListPanel.setLayoutData(gData);
+
+      FillLayout layout = new FillLayout();
+      layout.marginHeight = layout.marginWidth = 12;
+      serverListPanel.setLayout(layout);
 
       table =
-          new Table(serverList, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL
-              | SWT.FULL_SELECTION);
+          new Table(serverListPanel, SWT.BORDER | SWT.H_SCROLL
+              | SWT.V_SCROLL | SWT.FULL_SELECTION);
       table.setHeaderVisible(true);
       table.setLinesVisible(true);
-      GridData d = new GridData(GridData.FILL_HORIZONTAL);
-      d.horizontalSpan = 4;
-      d.heightHint = 300;
-      table.setLayoutData(d);
-
-      TableColumn nameColumn = new TableColumn(table, SWT.SINGLE);
-      nameColumn.setText("Name");
-      nameColumn.setWidth(160);
-
-      TableColumn hostColumn = new TableColumn(table, SWT.SINGLE);
-      hostColumn.setText("Location");
-      hostColumn.setWidth(200);
 
-      table.addSelectionListener(new SelectionListener() {
-        public void widgetSelected(SelectionEvent e) {
-          chooseExisting.setSelection(true);
-          createNew.setSelection(false); // shouldnt be necessary,
-          // but got a visual bug once
-
-          refreshButtons();
-        }
+      TableColumn nameColumn = new TableColumn(table, SWT.LEFT);
+      nameColumn.setText("Location");
+      nameColumn.setWidth(450);
+
+      TableColumn hostColumn = new TableColumn(table, SWT.LEFT);
+      hostColumn.setText("Master host name");
+      hostColumn.setWidth(250);
 
+      // If the user selects one entry, switch to "chooseExisting"
+      table.addSelectionListener(new SelectionListener() {
         public void widgetDefaultSelected(SelectionEvent e) {
+        }
 
+        public void widgetSelected(SelectionEvent e) {
+          chooseExisting.setSelection(true);
+          createNew.setSelection(false);
+          setPageComplete(table.getSelectionCount() == 1);
+          RunOnHadoopWizard.this.refreshButtons();
         }
       });
 
@@ -213,22 +324,31 @@
           new HadoopServerSelectionListContentProvider();
       viewer.setContentProvider(provider);
       viewer.setLabelProvider(provider);
-      viewer.setInput(new Object()); // don't care, get from singleton
-      // server registry
+      viewer.setInput(new Object());
+      // don't care, get from singleton server registry
 
-      setControl(control);
+      this.setControl(panel);
     }
-  }
 
-  public void widgetDefaultSelected(SelectionEvent e) {
-    // TODO Auto-generated method stub
+    /**
+     * Returns whether this page state allows the Wizard to finish or not
+     * 
+     * @return can the wizard finish or not?
+     */
+    public boolean canFinish() {
+      if (!isControlCreated())
+        return false;
 
-  }
+      if (this.createNew.getSelection())
+        return getNextPage().isPageComplete();
 
-  public void widgetSelected(SelectionEvent e) {
-    refreshButtons();
+      return this.chooseExisting.getSelection();
+    }
   }
 
+  /**
+   * @param progressMonitor
+   */
   public void setProgressMonitor(IProgressMonitor progressMonitor) {
     this.progressMonitor = progressMonitor;
   }

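[The interesting part of the new performFinish() above is how the launched job
picks up the location's settings: Hadoop's configuration machinery reads
hadoop-site.xml from the classpath, so the wizard serializes the JobConf into a
scratch directory and prepends that directory to the launch classpath. A
minimal sketch of that pattern, assuming the contemporary JobConf API; the
ConfDirSketch class and writeConfDir helper are illustrative, not part of the
plug-in:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.mapred.JobConf;

    public class ConfDirSketch {

      /** Serializes conf as dir/hadoop-site.xml; returns the new directory. */
      static File writeConfDir(JobConf conf, File baseDir) throws IOException {
        // createTempFile() reserves a unique name; swap the file for a directory
        File confDir = File.createTempFile("hadoop-conf-", "", baseDir);
        if (!confDir.delete() || !confDir.mkdirs())
          throw new IOException("Cannot create temporary directory: " + confDir);

        FileOutputStream fos =
            new FileOutputStream(new File(confDir, "hadoop-site.xml"));
        try {
          conf.write(fos); // write the configuration out as XML
        } finally {
          fos.close();
        }
        // The caller then prepends confDir to the launch classpath, as
        // performFinish() does via JavaRuntime.newArchiveRuntimeClasspathEntry()
        return confDir;
      }
    }

The createTempFile()/delete()/mkdirs() dance is simply a portable way to
reserve a uniquely named directory under the plug-in state location.]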
Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java Thu Oct 25 11:58:32 2007
@@ -18,24 +18,31 @@
 
 package org.apache.hadoop.eclipse.servers;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FilenameFilter;
 import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.List;
+import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.eclipse.Activator;
 import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.eclipse.jface.dialogs.MessageDialog;
 
 /**
- * Registry for storing Hadoop Servers
+ * Register of Hadoop locations.
+ * 
+ * Each location corresponds to a Hadoop {@link Configuration} stored as an
+ * XML file in the workspace plug-in configuration directory:
+ * <p>
+ * <tt>
+ * &lt;workspace-dir&gt;/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml
+ * </tt>
+ * 
  */
 public class ServerRegistry {
 
@@ -47,10 +54,21 @@
 
   public static final int SERVER_STATE_CHANGED = 2;
 
+  private final File baseDir =
+      Activator.getDefault().getStateLocation().toFile();
+
+  private final File saveDir = new File(baseDir, "locations");
+
   private ServerRegistry() {
+    if (saveDir.exists() && !saveDir.isDirectory())
+      saveDir.delete();
+    if (!saveDir.exists())
+      saveDir.mkdirs();
+
+    load();
   }
 
-  private List<HadoopServer> servers;
+  private Map<String, HadoopServer> servers;
 
   private Set<IHadoopServerListener> listeners =
       new HashSet<IHadoopServerListener>();
@@ -59,142 +77,89 @@
     return INSTANCE;
   }
 
-  public List<HadoopServer> getServers() {
-    return Collections.unmodifiableList(getServersInternal());
+  public synchronized Collection<HadoopServer> getServers() {
+    return Collections.unmodifiableCollection(servers.values());
   }
 
   /**
-   * Returns the list of currently defined servers. The list is read from the
-   * file if it is not in memory.
-   * 
-   * @return the list of hadoop servers
+   * Load all available locations from the workspace configuration directory.
    */
-  private List<HadoopServer> getServersInternal() {
+  private synchronized void load() {
+    Map<String, HadoopServer> map = new TreeMap<String, HadoopServer>();
+    for (File file : saveDir.listFiles()) {
+      try {
+        HadoopServer server = new HadoopServer(file);
+        map.put(server.getLocationName(), server);
 
-    if (servers == null) {
-      servers = new ArrayList<HadoopServer>();
+      } catch (Exception exn) {
+        System.err.println(exn);
+      }
+    }
+    this.servers = map;
+  }
 
-      File store =
-          Activator.getDefault().getStateLocation().append("SERVERS.txt")
-              .toFile();
-
-      if (!store.exists()) {
-        try {
-          store.createNewFile();
-        } catch (IOException e) {
-          // pretty fatal error here - we cant save or restore
-          throw new RuntimeException(e);
-        }
+  private synchronized void store() {
+    try {
+      File dir = File.createTempFile("locations", "new", baseDir);
+      dir.delete();
+      dir.mkdirs();
+
+      for (HadoopServer server : servers.values()) {
+        server.storeSettingsToFile(new File(dir, server.getLocationName()
+            + ".xml"));
       }
 
-      BufferedReader reader = null;
-      try {
-        reader = new BufferedReader(new FileReader(store));
-        String line;
-        while ((line = reader.readLine()) != null) {
-          try {
-            String[] parts = line.split("\t");
-            if (parts.length == 1) {
-              String location = parts[0];
-              parts = new String[] { location, "Hadoop Server" };
-            }
-
-            if (parts.length > 2) {
-              servers.add(new HadoopServer(parts[0], parts[1], parts[2],
-                  parts[3]));
-            } else {
-              servers.add(new HadoopServer(parts[0], parts[1]));
-            }
-
-            servers.get(servers.size() - 1).setId(servers.size() - 1);
-
-          } catch (Exception e) {
-            // TODO(jz) show message and ignore - still want rest of
-            // servers if we can get them
-            e.printStackTrace();
-          }
-        }
-      } catch (FileNotFoundException e) {
-        e.printStackTrace();
-      } catch (IOException e) {
-        // TODO(jz) show message and ignore - may have corrupt
-        // configuration
-        e.printStackTrace();
-      } finally {
-        if (reader != null) {
-          try {
-            reader.close();
-          } catch (IOException e) {
-            /* nothing we can do */
-          }
+      FilenameFilter XMLFilter = new FilenameFilter() {
+        public boolean accept(File dir, String name) {
+          String lower = name.toLowerCase();
+          return lower.endsWith(".xml");
         }
-      }
-    }
+      };
 
-    return servers;
+      File backup = new File(baseDir, "locations.backup");
+      if (backup.exists()) {
+        for (File file : backup.listFiles(XMLFilter))
+          if (!file.delete())
+            throw new IOException("Unable to delete backup location file: "
+                + file);
+        if (!backup.delete())
+          throw new IOException(
+              "Unable to delete backup location directory: " + backup);
+      }
+
+      saveDir.renameTo(backup);
+      dir.renameTo(saveDir);
+
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+      MessageDialog.openError(null,
+          "Saving configuration of Hadoop locations failed", ioe.toString());
+    }
   }
 
-  public synchronized void removeServer(HadoopServer server) {
-    getServersInternal().remove(server);
-    fireListeners(server, SERVER_REMOVED);
-    save();
+  public void dispose() {
+    for (HadoopServer server : getServers()) {
+      server.dispose();
+    }
   }
 
-  public synchronized void addServer(HadoopServer server) {
-    getServersInternal().add(server);
-    fireListeners(server, SERVER_ADDED);
-    save();
+  public synchronized HadoopServer getServer(String location) {
+    return servers.get(location);
   }
 
-  /**
-   * Save the list of servers to the plug-in configuration file, currently
-   * SERVERS.txt in
-   * <workspace-dir>/.metadata/.plugins/org.apache.hadoop.eclipse/SERVERS.txt
+  /*
+   * HadoopServer map listeners
    */
-  private synchronized void save() {
-    File store =
-        Activator.getDefault().getStateLocation().append("SERVERS.txt")
-            .toFile();
-    BufferedWriter writer = null;
 
-    if (!store.exists()) {
-      try {
-        store.createNewFile();
-      } catch (IOException e) {
-        // pretty fatal error here - we can't save or restore
-        throw new RuntimeException(e);
-      }
-    }
-
-    try {
-      writer = new BufferedWriter(new FileWriter(store));
-      int i = 0;
-      for (HadoopServer server : servers) {
-        server.setId(i++);
-        writer.append(server.toString() + "\t" + server.getName());
-        if (server.getTunnelHostName() != null) {
-          writer.append("\t" + server.getTunnelHostName() + "\t"
-              + server.getTunnelUserName());
-        }
-        writer.newLine();
-      }
-    } catch (IOException e) {
-      // TODO(jz) show error message
-      e.printStackTrace();
-    } finally {
-      if (writer != null) {
-        try {
-          writer.close();
-        } catch (IOException e) {
-          /* nothing we can do */
-        }
-      }
+  public void addListener(IHadoopServerListener l) {
+    synchronized (listeners) {
+      listeners.add(l);
     }
   }
 
-  public void addListener(IHadoopServerListener l) {
+  public void removeListener(IHadoopServerListener l) {
     synchronized (listeners) {
-      listeners.add(l);
+      listeners.remove(l);
     }
   }
 
@@ -206,24 +171,33 @@
     }
   }
 
-  public void stateChanged(HadoopServer job) {
-    fireListeners(job, SERVER_STATE_CHANGED);
+  public synchronized void removeServer(HadoopServer server) {
+    this.servers.remove(server.getLocationName());
+    store();
+    fireListeners(server, SERVER_REMOVED);
   }
 
-  public void removeListener(IHadoopServerListener l) {
-    synchronized (listeners) {
-      listeners.remove(l);
-    }
+  public synchronized void addServer(HadoopServer server) {
+    this.servers.put(server.getLocationName(), server);
+    store();
+    fireListeners(server, SERVER_ADDED);
   }
 
-  public void dispose() {
-    for (HadoopServer server : getServers()) {
-      server.dispose();
-    }
-  }
+  /**
+   * Update one Hadoop location
+   * 
+   * @param originalName the original location name (might have changed)
+   * @param server the location
+   */
+  public synchronized void updateServer(String originalName,
+      HadoopServer server) {
 
-  public HadoopServer getServer(int serverid) {
-    return servers.get(serverid);
+    // Update the map if the location name has changed
+    if (!server.getLocationName().equals(originalName)) {
+      servers.remove(originalName);
+      servers.put(server.getLocationName(), server);
+    }
+    store();
+    fireListeners(server, SERVER_STATE_CHANGED);
   }
-
 }

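[The rewritten store() above replaces the old single SERVERS.txt with one XML
file per location and, more importantly, never writes into the live
"locations" directory: it populates a scratch directory first, rotates the
current directory to "locations.backup", then renames the scratch directory
into place, so an interrupted save still leaves a complete copy on disk. A
condensed sketch of that swap; the DirWriter callback stands in for the
per-location XML serialization, and the sketch deletes the backup recursively
rather than filtering for *.xml as store() does:

    import java.io.File;
    import java.io.IOException;

    public class DirectorySwapSketch {

      /** Stand-in for writing each location as <name>.xml into dir. */
      interface DirWriter {
        void writeAll(File dir) throws IOException;
      }

      static void safeReplace(File baseDir, File liveDir, DirWriter writer)
          throws IOException {
        // Fully populate a scratch directory before touching the live one
        File scratch = File.createTempFile("locations", "new", baseDir);
        if (!scratch.delete() || !scratch.mkdirs())
          throw new IOException("Cannot create scratch directory: " + scratch);
        writer.writeAll(scratch);

        // Rotate: live -> backup, scratch -> live
        File backup = new File(baseDir, liveDir.getName() + ".backup");
        deleteRecursively(backup); // drop the previous backup, if any
        if (liveDir.exists() && !liveDir.renameTo(backup))
          throw new IOException("Cannot back up " + liveDir + " to " + backup);
        if (!scratch.renameTo(liveDir))
          throw new IOException("Cannot move " + scratch + " to " + liveDir);
      }

      private static void deleteRecursively(File f) {
        File[] children = f.listFiles(); // null for plain files, missing paths
        if (children != null)
          for (File child : children)
            deleteRecursively(child);
        f.delete();
      }
    }

If both renames succeed, the backup holds the previous generation; if the
process dies between them, either locations.backup or the scratch directory
still holds a full set of location files.]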
Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/view/servers/ServerView.java Thu Oct 25 11:58:32 2007
@@ -18,34 +18,31 @@
 
 package org.apache.hadoop.eclipse.view.servers;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.actions.EditServerAction;
-import org.apache.hadoop.eclipse.actions.NewServerAction;
+import java.util.Collection;
+
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.actions.EditLocationAction;
+import org.apache.hadoop.eclipse.actions.NewLocationAction;
 import org.apache.hadoop.eclipse.server.HadoopJob;
 import org.apache.hadoop.eclipse.server.HadoopServer;
 import org.apache.hadoop.eclipse.server.IJobListener;
 import org.apache.hadoop.eclipse.server.JarModule;
 import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
 import org.apache.hadoop.eclipse.servers.ServerRegistry;
-import org.eclipse.core.runtime.FileLocator;
-import org.eclipse.core.runtime.Path;
-import org.eclipse.debug.internal.ui.DebugPluginImages;
-import org.eclipse.debug.ui.IDebugUIConstants;
 import org.eclipse.jface.action.Action;
 import org.eclipse.jface.action.IAction;
-import org.eclipse.jface.resource.ImageDescriptor;
-import org.eclipse.jface.viewers.IContentProvider;
+import org.eclipse.jface.action.IMenuListener;
+import org.eclipse.jface.action.IMenuManager;
+import org.eclipse.jface.action.MenuManager;
+import org.eclipse.jface.dialogs.MessageDialog;
 import org.eclipse.jface.viewers.ILabelProviderListener;
 import org.eclipse.jface.viewers.ISelection;
-import org.eclipse.jface.viewers.IStructuredContentProvider;
+import org.eclipse.jface.viewers.ISelectionChangedListener;
 import org.eclipse.jface.viewers.IStructuredSelection;
 import org.eclipse.jface.viewers.ITableLabelProvider;
 import org.eclipse.jface.viewers.ITreeContentProvider;
 import org.eclipse.jface.viewers.ITreeSelection;
+import org.eclipse.jface.viewers.SelectionChangedEvent;
 import org.eclipse.jface.viewers.TreeViewer;
 import org.eclipse.jface.viewers.Viewer;
 import org.eclipse.swt.SWT;
@@ -53,6 +50,7 @@
 import org.eclipse.swt.layout.GridData;
 import org.eclipse.swt.widgets.Composite;
 import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Menu;
 import org.eclipse.swt.widgets.Tree;
 import org.eclipse.swt.widgets.TreeColumn;
 import org.eclipse.ui.IViewSite;
@@ -60,30 +58,73 @@
 import org.eclipse.ui.actions.ActionFactory;
 import org.eclipse.ui.part.ViewPart;
 
-import com.jcraft.jsch.Channel;
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-
 /**
- * Code for displaying/updating the MapReduce Servers view panel
+ * Map/Reduce locations view: displays all available Hadoop locations and the
+ * Jobs running/finished on these locations
  */
-public class ServerView extends ViewPart implements IContentProvider,
-    IStructuredContentProvider, ITreeContentProvider, ITableLabelProvider,
-    IJobListener, IHadoopServerListener {
+public class ServerView extends ViewPart implements ITreeContentProvider,
+    ITableLabelProvider, IJobListener, IHadoopServerListener {
+
+  /**
+   * Deletion action: delete a Hadoop location, kill a running job or remove
+   * a finished job entry
+   */
+  class DeleteAction extends Action {
+
+    DeleteAction() {
+      setText("Delete");
+      setImageDescriptor(ImageLibrary.get("server.view.action.delete"));
+    }
+
+    /* @inheritDoc */
+    @Override
+    public void run() {
+      ISelection selection =
+          getViewSite().getSelectionProvider().getSelection();
+      if ((selection != null) && (selection instanceof IStructuredSelection)) {
+        Object selItem =
+            ((IStructuredSelection) selection).getFirstElement();
+
+        if (selItem instanceof HadoopServer) {
+          HadoopServer location = (HadoopServer) selItem;
+          if (MessageDialog.openConfirm(Display.getDefault()
+              .getActiveShell(), "Confirm delete Hadoop location",
+              "Do you really want to remove the Hadoop location: "
+                  + location.getLocationName())) {
+            ServerRegistry.getInstance().removeServer(location);
+          }
+
+        } else if (selItem instanceof HadoopJob) {
+
+          // kill the job
+          HadoopJob job = (HadoopJob) selItem;
+          if (job.isCompleted()) {
+            // Job already finished, remove the entry
+            job.getLocation().purgeJob(job);
+
+          } else {
+            // Job is running, kill the job?
+            if (MessageDialog.openConfirm(Display.getDefault()
+                .getActiveShell(), "Confirm kill running Job",
+                "Do you really want to kill running Job: " + job.getJobId())) {
+              job.kill();
+            }
+          }
+        }
+      }
+    }
+  }
 
   /**
    * This object is the root content for this content provider
    */
   private static final Object CONTENT_ROOT = new Object();
 
-  private final IAction DELETE = new DeleteAction();
-
-  private final IAction PROPERTIES = new EditServerAction(this);
+  private final IAction deleteAction = new DeleteAction();
 
-  private final IAction NEWSERVER = new NewServerAction();
+  private final IAction editServerAction = new EditLocationAction(this);
 
-  private Map<String, Image> images = new HashMap<String, Image>();
+  private final IAction newLocationAction = new NewLocationAction();
 
   private TreeViewer viewer;
 
@@ -94,32 +135,12 @@
   @Override
   public void init(IViewSite site) throws PartInitException {
     super.init(site);
-
-    try {
-      images.put("hadoop", ImageDescriptor.createFromURL(
-          (FileLocator.toFileURL(FileLocator.find(Activator.getDefault()
-              .getBundle(), new Path("resources/hadoop_small.gif"), null))))
-          .createImage(true));
-      images.put("job", ImageDescriptor.createFromURL(
-          (FileLocator.toFileURL(FileLocator.find(Activator.getDefault()
-              .getBundle(), new Path("resources/job.gif"), null))))
-          .createImage(true));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
   }
 
   /* @inheritDoc */
   @Override
   public void dispose() {
-    for (String key : images.keySet()) {
-      if (images.containsKey(key))
-        ((Image) images.get(key)).dispose();
-    }
-
     ServerRegistry.getInstance().removeListener(this);
-
-    images.clear();
   }
 
   /**
@@ -135,12 +156,12 @@
     main.setLayoutData(new GridData(GridData.FILL_BOTH));
 
     TreeColumn serverCol = new TreeColumn(main, SWT.SINGLE);
-    serverCol.setText("Server");
-    serverCol.setWidth(185);
+    serverCol.setText("Location");
+    serverCol.setWidth(300);
     serverCol.setResizable(true);
 
     TreeColumn locationCol = new TreeColumn(main, SWT.SINGLE);
-    locationCol.setText("Location");
+    locationCol.setText("Master node");
     locationCol.setWidth(185);
     locationCol.setResizable(true);
 
@@ -157,72 +178,83 @@
     viewer = new TreeViewer(main);
     viewer.setContentProvider(this);
     viewer.setLabelProvider(this);
-    viewer.setInput(CONTENT_ROOT); // dont care
+    viewer.setInput(CONTENT_ROOT); // don't care
 
     getViewSite().setSelectionProvider(viewer);
+    
     getViewSite().getActionBars().setGlobalActionHandler(
-        ActionFactory.DELETE.getId(), DELETE);
-
-    getViewSite().getActionBars().getToolBarManager().add(PROPERTIES);
+        ActionFactory.DELETE.getId(), deleteAction);
+    getViewSite().getActionBars().getToolBarManager().add(editServerAction);
+    getViewSite().getActionBars().getToolBarManager().add(newLocationAction);
 
-    // getViewSite().getActionBars().getToolBarManager().add(new
-    // StartAction());
-    getViewSite().getActionBars().getToolBarManager().add(NEWSERVER);
+    createActions();
+    createContextMenu();
   }
 
-  // NewServerAction moved to actions package for cheat sheet access --
-  // eyhung
-
-  public class DeleteAction extends Action {
-    @Override
-    public void run() {
-      ISelection selection =
-          getViewSite().getSelectionProvider().getSelection();
-      if ((selection != null) && (selection instanceof IStructuredSelection)) {
-        Object selItem =
-            ((IStructuredSelection) selection).getFirstElement();
-
-        if (selItem instanceof HadoopServer) {
-          HadoopServer location = (HadoopServer) selItem;
-          ServerRegistry.getInstance().removeServer(location);
-
-        } else if (selItem instanceof HadoopJob) {
-
-          // kill the job
-          HadoopJob job = (HadoopJob) selItem;
-          HadoopServer server = job.getServer();
-          String jobId = job.getJobId();
+  /**
+   * Actions
+   */
+  private void createActions() {
+    /*
+     * addItemAction = new Action("Add...") { public void run() { addItem(); } };
+     * addItemAction.setImageDescriptor(ImageLibrary
+     * .get("server.view.location.new"));
+     */
+    /*
+     * deleteItemAction = new Action("Delete") { public void run() {
+     * deleteItem(); } };
+     * deleteItemAction.setImageDescriptor(getImageDescriptor("delete.gif"));
+     * 
+     * selectAllAction = new Action("Select All") { public void run() {
+     * selectAll(); } };
+     */
+    // Add selection listener.
+    viewer.addSelectionChangedListener(new ISelectionChangedListener() {
+      public void selectionChanged(SelectionChangedEvent event) {
+        updateActionEnablement();
+      }
+    });
+  }
 
-          if (job.isCompleted())
-            return;
+  private void addItem() {
+    System.out.printf("ADD ITEM\n");
+  }
 
-          try {
-            Session session = server.createSession();
+  private void updateActionEnablement() {
+    IStructuredSelection sel = (IStructuredSelection) viewer.getSelection();
+    // deleteItemAction.setEnabled(sel.size() > 0);
+  }
 
-            String command =
-                server.getInstallPath() + "/bin/hadoop job -kill " + jobId;
-            Channel channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-            channel.connect();
-            channel.disconnect();
-
-            session.disconnect();
-          } catch (JSchException e) {
-            e.printStackTrace();
-          }
-        }
+  /**
+   * Contextual menu
+   */
+  private void createContextMenu() {
+    // Create menu manager.
+    MenuManager menuMgr = new MenuManager();
+    menuMgr.setRemoveAllWhenShown(true);
+    menuMgr.addMenuListener(new IMenuListener() {
+      public void menuAboutToShow(IMenuManager mgr) {
+        fillContextMenu(mgr);
       }
-    }
-  }
+    });
 
-  public static class StartAction extends Action {
-    public StartAction() {
-      setText("Start");
-
-      // NOTE(jz) - all below from internal api, worst case no images
-      setImageDescriptor(DebugPluginImages
-          .getImageDescriptor(IDebugUIConstants.IMG_ACT_RUN));
-    }
+    // Create menu.
+    Menu menu = menuMgr.createContextMenu(viewer.getControl());
+    viewer.getControl().setMenu(menu);
+
+    // Register menu for extension.
+    getSite().registerContextMenu(menuMgr, viewer);
+  }
+
+  private void fillContextMenu(IMenuManager mgr) {
+    mgr.add(newLocationAction);
+    mgr.add(editServerAction);
+    mgr.add(deleteAction);
+    /*
+     * mgr.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS));
+     * mgr.add(deleteItemAction); mgr.add(new Separator());
+     * mgr.add(selectAllAction);
+     */
   }
 
   /* @inheritDoc */
@@ -231,6 +263,10 @@
 
   }
 
+  /*
+   * IHadoopServerListener implementation
+   */
+
   /* @inheritDoc */
   public void serverChanged(HadoopServer location, int type) {
     Display.getDefault().syncExec(new Runnable() {
@@ -240,6 +276,10 @@
     });
   }
 
+  /*
+   * IStructuredContentProvider implementation
+   */
+
   /* @inheritDoc */
   public void inputChanged(final Viewer viewer, Object oldInput,
       Object newInput) {
@@ -249,17 +289,30 @@
       ServerRegistry.getInstance().addListener(this);
   }
 
+  /**
+   * The root elements displayed by this view are the existing Hadoop
+   * locations
+   */
   /* @inheritDoc */
   public Object[] getElements(Object inputElement) {
     return ServerRegistry.getInstance().getServers().toArray();
   }
 
+  /*
+   * ITreeContentProvider implementation
+   */
+
+  /**
+   * Each location contains a child entry for each job it runs.
+   */
   /* @inheritDoc */
-  public Object[] getChildren(Object parentElement) {
-    if (parentElement instanceof HadoopServer) {
-      ((HadoopServer) parentElement).addJobListener(this);
+  public Object[] getChildren(Object parent) {
 
-      return ((HadoopServer) parentElement).getChildren();
+    if (parent instanceof HadoopServer) {
+      HadoopServer location = (HadoopServer) parent;
+      location.addJobListener(this);
+      Collection<HadoopJob> jobs = location.getJobs();
+      return jobs.toArray();
     }
 
     return null;
@@ -269,9 +322,11 @@
   public Object getParent(Object element) {
     if (element instanceof HadoopServer) {
       return CONTENT_ROOT;
+
     } else if (element instanceof HadoopJob) {
-      return ((HadoopJob) element).getServer();
+      return ((HadoopJob) element).getLocation();
     }
+
     return null;
   }
 
@@ -281,6 +336,10 @@
     return (element instanceof HadoopServer);
   }
 
+  /*
+   * ITableLabelProvider implementation
+   */
+
   /* @inheritDoc */
   public void addListener(ILabelProviderListener listener) {
     // no listeners handling
@@ -298,9 +357,10 @@
   /* @inheritDoc */
   public Image getColumnImage(Object element, int columnIndex) {
     if ((columnIndex == 0) && (element instanceof HadoopServer)) {
-      return images.get("hadoop");
+      return ImageLibrary.getImage("server.view.location.entry");
+
     } else if ((columnIndex == 0) && (element instanceof HadoopJob)) {
-      return images.get("job");
+      return ImageLibrary.getImage("server.view.job.entry");
     }
     return null;
   }
@@ -312,9 +372,9 @@
 
       switch (columnIndex) {
         case 0:
-          return server.getName();
+          return server.getLocationName();
         case 1:
-          return server.getHostName().toString();
+          return server.getMasterHostName().toString();
         case 2:
           return server.getState();
         case 3:
@@ -325,11 +385,11 @@
 
       switch (columnIndex) {
         case 0:
-          return job.getId();
+          return job.getJobId();
         case 1:
           return "";
         case 2:
-          return job.getState();
+          return job.getState().toString();
         case 3:
           return job.getStatus();
       }
@@ -349,21 +409,38 @@
     return null;
   }
 
+  /*
+   * IJobListener (Map/Reduce Jobs listener) implementation
+   */
+
+  /* @inheritDoc */
   public void jobAdded(HadoopJob job) {
     viewer.refresh();
   }
 
+  /* @inheritDoc */
+  public void jobRemoved(HadoopJob job) {
+    viewer.refresh();
+  }
+
+  /* @inheritDoc */
   public void jobChanged(HadoopJob job) {
     viewer.refresh(job);
   }
 
+  /* @inheritDoc */
   public void publishDone(JarModule jar) {
     viewer.refresh();
   }
 
+  /* @inheritDoc */
   public void publishStart(JarModule jar) {
     viewer.refresh();
   }
+
+  /*
+   * Miscellaneous
+   */
 
   /**
    * Return the currently selected server (null if there is no selection or