You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by rs...@apache.org on 2014/06/23 10:43:33 UTC

[1/2] git commit: HDT-13: Validating Hadoop preference home for the selected version.

Repository: incubator-hdt
Updated Branches:
  refs/heads/hadoop-eclipse-merge-development b6634e66d -> bbc139af5


 HDT-13: Validating Hadoop preference home for the selected version.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/d63110a4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/d63110a4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/d63110a4

Branch: refs/heads/hadoop-eclipse-merge-development
Commit: d63110a4ec9bb5debae25f127bb8085fb0ff48df
Parents: b6634e6
Author: Rahul Sharma <rs...@apache.org>
Authored: Fri Jun 20 16:34:10 2014 +0530
Committer: Rahul Sharma <rs...@apache.org>
Committed: Mon Jun 23 10:03:10 2014 +0530

----------------------------------------------------------------------
 .../ui/preferences/MapReducePreferencePage.java | 119 ++++++++++++++-----
 1 file changed, 87 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/d63110a4/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
index b711f91..240fc64 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
@@ -17,19 +17,25 @@
  */
 package org.apache.hdt.ui.preferences;
 
+import org.apache.hdt.core.AbstractHadoopHomeReader;
 import org.apache.hdt.core.HadoopVersion;
 import org.apache.hdt.ui.Activator;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.Path;
 import org.eclipse.jface.preference.ComboFieldEditor;
 import org.eclipse.jface.preference.DirectoryFieldEditor;
+import org.eclipse.jface.preference.FieldEditor;
 import org.eclipse.jface.preference.FieldEditorPreferencePage;
+import org.eclipse.jface.preference.StringFieldEditor;
+import org.eclipse.jface.util.PropertyChangeEvent;
 import org.eclipse.ui.IWorkbench;
 import org.eclipse.ui.IWorkbenchPreferencePage;
 
 /**
  * This class represents a preference page that is contributed to the
- * Preferences dialog. By sub-classing <tt>FieldEditorPreferencePage</tt>,
- * we can use the field support built into JFace that allows us to create a
- * page that is small and knows how to save, restore and apply itself.
+ * Preferences dialog. By sub-classing <tt>FieldEditorPreferencePage</tt>, we
+ * can use the field support built into JFace that allows us to create a page
+ * that is small and knows how to save, restore and apply itself.
  * 
  * <p>
  * This page is used to modify preferences only. They are stored in the
@@ -37,39 +43,88 @@ import org.eclipse.ui.IWorkbenchPreferencePage;
  * preferences can be accessed directly via the preference store.
  */
 
-public class MapReducePreferencePage extends FieldEditorPreferencePage
-    implements IWorkbenchPreferencePage {
+public class MapReducePreferencePage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage {
 
-  public MapReducePreferencePage() {
-    super(GRID);
-    setPreferenceStore(Activator.getDefault().getPreferenceStore());
-    setTitle("Hadoop Map/Reduce Tools");
-    // setDescription("Hadoop Map/Reduce Preferences");
-  }
+	private StringFieldEditor hadoopHomeDirEditor;
+	private ComboFieldEditor hadoopVersionEditor;
+	private String hadoopVersionValue;
+	private String hadoopHomeValue;
 
-  /**
-   * Creates the field editors. Field editors are abstractions of the common
-   * GUI blocks needed to manipulate various types of preferences. Each field
-   * editor knows how to save and restore itself.
-   */
-  @Override
-  public void createFieldEditors() {
-    addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
-        "&Hadoop installation directory:", getFieldEditorParent()));
-    HadoopVersion[] versions = HadoopVersion.values();
-    String[][] values= new String[versions.length][2];
-    int pos=0;
-	for(HadoopVersion ver:versions){
-		values[pos][0]=values[pos][1]=ver.getDisplayName();
-		pos++;
+	public MapReducePreferencePage() {
+		super(GRID);
+		setPreferenceStore(Activator.getDefault().getPreferenceStore());
+		setTitle("Hadoop Map/Reduce Tools");
+		// setDescription("Hadoop Map/Reduce Preferences");
 	}
-    addField(new ComboFieldEditor(PreferenceConstants.P_VERSION,
-            "&Hadoop Version:",values,getFieldEditorParent()));
 
-  }
+	/**
+	 * Creates the field editors. Field editors are abstractions of the common
+	 * GUI blocks needed to manipulate various types of preferences. Each field
+	 * editor knows how to save and restore itself.
+	 */
+	@Override
+	public void createFieldEditors() {
+		DirectoryFieldEditor editor = new DirectoryFieldEditor(PreferenceConstants.P_PATH, "&Hadoop installation directory:", getFieldEditorParent());
+		addField(editor);
+		HadoopVersion[] versions = HadoopVersion.values();
+		String[][] values = new String[versions.length][2];
+		int pos = 0;
+		for (HadoopVersion ver : versions) {
+			values[pos][0] = values[pos][1] = ver.getDisplayName();
+			pos++;
+		}
+		ComboFieldEditor options = new ComboFieldEditor(PreferenceConstants.P_VERSION, "&Hadoop Version:", values, getFieldEditorParent());
+		addField(options);
+		hadoopVersionEditor = options;
+		hadoopHomeDirEditor = editor;
+		hadoopVersionValue = HadoopVersion.Version1.getDisplayName();
+	}
+
+	public void propertyChange(PropertyChangeEvent event) {
+		super.propertyChange(event);
+		if (event.getSource().equals(hadoopVersionEditor)) {
+			hadoopVersionValue = event.getNewValue().toString();
+		}
+		if (event.getSource().equals(hadoopHomeDirEditor)) {
+			hadoopHomeValue = event.getNewValue().toString();
+		}
+		if (event.getProperty().equals(FieldEditor.VALUE)) {
+			checkState();
+		}
+	}
 
-  /* @inheritDoc */
-  public void init(IWorkbench workbench) {
-  }
+	@Override
+	protected void checkState() {
+		super.checkState();
+		if(hadoopHomeValue==null || hadoopVersionValue==null){
+			setErrorMessage("Please set Hadoop Home/Version.");
+			setValid(false);
+			return;
+		}
+		AbstractHadoopHomeReader homeReader;
+		try {
+			homeReader = AbstractHadoopHomeReader.createReader(hadoopVersionValue);
+			if (!homeReader.validateHadoopHome(new Path(hadoopHomeValue).toFile())) {
+				setErrorMessage("Invalid Hadoop Home.");
+				setValid(false);
+			} else {
+				setErrorMessage(null);
+				setValid(true);
+			}
+		} catch (CoreException e) {
+			e.printStackTrace();
+		}
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.eclipse.ui.IWorkbenchPreferencePage#init(org.eclipse.ui.IWorkbench)
+	 */
+	@Override
+	public void init(IWorkbench workbench) {
+		// TODO Auto-generated method stub
 
+	}
 }


[2/2] git commit: - Fixing cleanup on destroy server

Posted by rs...@apache.org.
- Fixing cleanup on destroy server


Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/bbc139af
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/bbc139af
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/bbc139af

Branch: refs/heads/hadoop-eclipse-merge-development
Commit: bbc139af52c9d43908c4b2f99c02686524b9fd66
Parents: d63110a
Author: Rahul Sharma <rs...@apache.org>
Authored: Mon Jun 23 13:51:41 2014 +0530
Committer: Rahul Sharma <rs...@apache.org>
Committed: Mon Jun 23 13:51:41 2014 +0530

----------------------------------------------------------------------
 .../org/apache/hdt/core/internal/hdfs/HDFSManager.java   | 11 +++++++++++
 1 file changed, 11 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bbc139af/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
index 43ebf1f..8d27d23 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
@@ -207,6 +207,7 @@ public class HDFSManager {
 									"HDFS Error", "Unable to create HDFS site :"+e.getMessage());
 						}
 					});
+					deleteServer(getServer(hdfsURI.toString()));
 					return e.getStatus();
 				} finally {
 					monitor.done();
@@ -280,7 +281,17 @@ public class HDFSManager {
 		String projectName = this.serverToProjectMap.remove(server);
 		this.projectToServerMap.remove(projectName);
 		this.uriToServerMap.remove(server.getUri());
+		this.uriToServerCacheMap.remove(server.getUri());
 		HadoopManager.INSTANCE.saveServers();
+		String tmpUri = server.getUri();
+		while (tmpUri != null && uriToServerCacheMap.containsKey(tmpUri)) {
+			uriToServerCacheMap.remove(tmpUri);
+			int lastSlashIndex = tmpUri.lastIndexOf('/');
+			tmpUri = lastSlashIndex < 0 ? null : tmpUri.substring(0, lastSlashIndex);
+		}
+		if(hdfsClientsMap.containsKey(server.getUri().toString())){
+			hdfsClientsMap.remove(server.getUri().toString());
+		}
 	}
 
 	/**