Posted to commits@hdt.apache.org by rs...@apache.org on 2014/06/26 10:36:38 UTC

[15/27] git commit: HDT 7: - fixing bug: Cluster view loads only Hadoop 1.1 based clusters

 HDT 7:
  - fixing bug: Cluster view loads only Hadoop 1.1 based clusters


Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/9d78cbdb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/9d78cbdb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/9d78cbdb

Branch: refs/heads/hadoop-eclipse-merge
Commit: 9d78cbdb45294608954cf82863f869ab48104095
Parents: 34799ce
Author: Rahul Sharma <rs...@apache.org>
Authored: Wed Jun 11 10:54:11 2014 +0530
Committer: Rahul Sharma <rs...@apache.org>
Committed: Wed Jun 11 10:54:11 2014 +0530

----------------------------------------------------------------------
 .../hdt/core/launch/AbstractHadoopCluster.java  | 69 +++++++++++++++-
 .../hdt/hadoop/release/HadoopCluster.java       | 70 ++--------------
 .../hdt/hadoop2/release/HadoopCluster.java      | 84 +++-----------------
 .../internal/launch/HadoopLocationWizard.java   |  3 +
 4 files changed, 85 insertions(+), 141 deletions(-)
----------------------------------------------------------------------
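
For context, the net effect of the changes below: the per-version loadFromXML implementations are replaced by a shared loadXML parser in AbstractHadoopCluster, and createCluster(File) now reads the persisted Hadoop version from the location file before instantiating a cluster, so saved Hadoop 2.x locations are no longer reloaded as 1.1 clusters. A minimal caller-side sketch (method names are from the diff; the file path is illustrative only):

    import java.io.File;
    import org.apache.hdt.core.launch.AbstractHadoopCluster;

    File locationFile = new File("saved-location.xml");   // illustrative path
    // Parses the file once in the base class, then picks the cluster implementation
    // matching the stored PI_HADOOP_VERSION, falling back to the default version
    // only when that property is absent.
    AbstractHadoopCluster cluster = AbstractHadoopCluster.createCluster(locationFile);
    System.out.println(cluster.getVersion());             // now reflects the version stored in the file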


http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/9d78cbdb/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
index 47d00f4..e2fa064 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
@@ -21,16 +21,27 @@ package org.apache.hdt.core.launch;
 import java.io.File;
 import java.io.IOException;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
+import java.util.Map;
 import java.util.Map.Entry;
 
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
 import org.apache.hdt.core.Activator;
-import org.apache.hdt.core.internal.HadoopManager;
 import org.apache.log4j.Logger;
 import org.eclipse.core.runtime.CoreException;
 import org.eclipse.core.runtime.IConfigurationElement;
 import org.eclipse.core.runtime.Platform;
 import org.eclipse.core.runtime.Status;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Text;
+import org.xml.sax.SAXException;
 
 public abstract class AbstractHadoopCluster {
 	
@@ -68,15 +79,17 @@ public abstract class AbstractHadoopCluster {
 
 	abstract public String getState();
 
-	abstract public boolean loadFromXML(File file) throws IOException;
+	abstract protected boolean loadConfiguration(Map<String, String> configuration);
 	
 	abstract public boolean isAvailable() throws CoreException;
 	
 	abstract public String getVersion();
 	
 	public static AbstractHadoopCluster createCluster(File file) throws CoreException, IOException {
-		AbstractHadoopCluster hadoopCluster = createCluster(ConfProp.PI_HADOOP_VERSION.defVal);
-		hadoopCluster.loadFromXML(file);
+		Map<String, String> configuration = loadXML(file);
+		String version = configuration.get(ConfProp.PI_HADOOP_VERSION.name);
+		AbstractHadoopCluster hadoopCluster = createCluster(version!=null?version:ConfProp.PI_HADOOP_VERSION.defVal);
+		hadoopCluster.loadConfiguration(configuration);
 		return hadoopCluster;
 	}
 
@@ -97,6 +110,54 @@ public abstract class AbstractHadoopCluster {
 		hadoopCluster.load(existing);
 		return hadoopCluster;
 	}
+	
+	
+	protected static Map<String,String> loadXML(File file) {
+		DocumentBuilder builder;
+		Document document;
+		try {
+			builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
+			document = builder.parse(file);
+		} catch (ParserConfigurationException e) {
+			e.printStackTrace();
+			return null;
+		} catch (SAXException e) {
+			e.printStackTrace();
+			return null;
+		} catch (IOException e) {
+			e.printStackTrace();
+			return null;
+		}
+		Element root = document.getDocumentElement();
+		if (!"configuration".equals(root.getTagName()))
+			return null;
+		NodeList props = root.getChildNodes();
+		Map<String,String> configuration= new HashMap<String, String>();
+		for (int i = 0; i < props.getLength(); i++) {
+			Node propNode = props.item(i);
+			if (!(propNode instanceof Element))
+				continue;
+			Element prop = (Element) propNode;
+			if (!"property".equals(prop.getTagName()))
+				return null;
+			NodeList fields = prop.getChildNodes();
+			String attr = null;
+			String value = null;
+			for (int j = 0; j < fields.getLength(); j++) {
+				Node fieldNode = fields.item(j);
+				if (!(fieldNode instanceof Element))
+					continue;
+				Element field = (Element) fieldNode;
+				if ("name".equals(field.getTagName()))
+					attr = ((Text) field.getFirstChild()).getData();
+				if ("value".equals(field.getTagName()) && field.hasChildNodes())
+					value = ((Text) field.getFirstChild()).getData();
+			}
+			if (attr != null && value != null)
+				configuration.put(attr, value);
+		}
+		return configuration;
+	}
 
 	/**
 	 * @param propName

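The new loadXML helper expects the standard Hadoop site-file layout: a <configuration> root whose <property> children each carry a <name> and a <value>. A wrong root tag, a child element other than <property>, or a parse error makes it return null, and the subclass's loadConfiguration then reports failure. For illustration, a minimal file it accepts (the property name and value are examples only, not taken from this commit):

    <configuration>
      <property>
        <name>fs.default.name</name>
        <value>hdfs://namenode:9000</value>
      </property>
    </configuration>
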
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/9d78cbdb/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
index 0014bb6..167ae29 100644
--- a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
+++ b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
@@ -248,18 +248,6 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	}
 
 	/**
-	 * Creates a location from a file
-	 * 
-	 * @throws IOException
-	 * @throws SAXException
-	 * @throws ParserConfigurationException
-	 */
-	public HadoopCluster(File file) throws ParserConfigurationException, SAXException, IOException {
-		this();
-		this.loadFromXML(file);
-	}
-
-	/**
 	 * Create a new Hadoop location by copying an already existing one.
 	 * 
 	 * @param source
@@ -369,61 +357,13 @@ public class HadoopCluster extends AbstractHadoopCluster {
 		this.conf = new Configuration(((HadoopCluster) existing).conf);
 	}
 
-	/**
-	 * Overwrite this location with settings available in the given XML file.
-	 * The existing configuration is preserved if the XML file is invalid.
-	 * 
-	 * @param file
-	 *            the file path of the XML file
-	 * @return validity of the XML file
-	 * @throws ParserConfigurationException
-	 * @throws IOException
-	 * @throws SAXException
-	 */
-	public boolean loadFromXML(File file) {
-
+	protected boolean loadConfiguration(Map<String, String> configuration) {
 		Configuration newConf = new Configuration(this.conf);
-		DocumentBuilder builder;
-		Document document;
-		try {
-			builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
-			document = builder.parse(file);
-		} catch (ParserConfigurationException e) {
-			e.printStackTrace();
-			return false;
-		} catch (SAXException e) {
-			e.printStackTrace();
+		if(configuration ==null)
 			return false;
-		} catch (IOException e) {
-			e.printStackTrace();
-			return false;
-		}
-		Element root = document.getDocumentElement();
-		if (!"configuration".equals(root.getTagName()))
-			return false;
-		NodeList props = root.getChildNodes();
-		for (int i = 0; i < props.getLength(); i++) {
-			Node propNode = props.item(i);
-			if (!(propNode instanceof Element))
-				continue;
-			Element prop = (Element) propNode;
-			if (!"property".equals(prop.getTagName()))
-				return false;
-			NodeList fields = prop.getChildNodes();
-			String attr = null;
-			String value = null;
-			for (int j = 0; j < fields.getLength(); j++) {
-				Node fieldNode = fields.item(j);
-				if (!(fieldNode instanceof Element))
-					continue;
-				Element field = (Element) fieldNode;
-				if ("name".equals(field.getTagName()))
-					attr = ((Text) field.getFirstChild()).getData();
-				if ("value".equals(field.getTagName()) && field.hasChildNodes())
-					value = ((Text) field.getFirstChild()).getData();
-			}
-			if (attr != null && value != null)
-				newConf.set(attr, value);
+		
+		for (Entry<String, String> entry : configuration.entrySet()) {
+			newConf.set(entry.getKey() , entry.getValue());
 		}
 
 		this.conf = newConf;

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/9d78cbdb/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopCluster.java b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopCluster.java
index b200a9f..56f1880 100644
--- a/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopCluster.java
+++ b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopCluster.java
@@ -248,18 +248,6 @@ public class HadoopCluster extends AbstractHadoopCluster {
         }
    
 	/**
-	 * Creates a location from a file
-	 * 
-	 * @throws IOException
-	 * @throws SAXException
-	 * @throws ParserConfigurationException
-	 */
-	public HadoopCluster(File file) throws ParserConfigurationException, SAXException, IOException {
-		this();
-		this.loadFromXML(file);
-	}
-
-	/**
 	 * Create a new Hadoop location by copying an already existing one.
 	 * 
 	 * @param source
@@ -370,66 +358,18 @@ public class HadoopCluster extends AbstractHadoopCluster {
 		this.conf = new Configuration(((HadoopCluster) existing).conf);
 	}
 
-	/**
-	 * Overwrite this location with settings available in the given XML file.
-	 * The existing configuration is preserved if the XML file is invalid.
-	 * 
-	 * @param file
-	 *            the file path of the XML file
-	 * @return validity of the XML file
-	 * @throws ParserConfigurationException
-	 * @throws IOException
-	 * @throws SAXException
-	 */
-	public boolean loadFromXML(File file) {
-
-		Configuration newConf = new Configuration(this.conf);
-		DocumentBuilder builder;
-		Document document;
-		try {
-			builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
-			document = builder.parse(file);
-		} catch (ParserConfigurationException e) {
-			e.printStackTrace();
-			return false;
-		} catch (SAXException e) {
-			e.printStackTrace();
-			return false;
-		} catch (IOException e) {
-			e.printStackTrace();
-			return false;
-		}
-		Element root = document.getDocumentElement();
-		if (!"configuration".equals(root.getTagName()))
-			return false;
-		NodeList props = root.getChildNodes();
-		for (int i = 0; i < props.getLength(); i++) {
-			Node propNode = props.item(i);
-			if (!(propNode instanceof Element))
-				continue;
-			Element prop = (Element) propNode;
-			if (!"property".equals(prop.getTagName()))
-				return false;
-			NodeList fields = prop.getChildNodes();
-			String attr = null;
-			String value = null;
-			for (int j = 0; j < fields.getLength(); j++) {
-				Node fieldNode = fields.item(j);
-				if (!(fieldNode instanceof Element))
-					continue;
-				Element field = (Element) fieldNode;
-				if ("name".equals(field.getTagName()))
-					attr = ((Text) field.getFirstChild()).getData();
-				if ("value".equals(field.getTagName()) && field.hasChildNodes())
-					value = ((Text) field.getFirstChild()).getData();
-			}
-			if (attr != null && value != null)
-				newConf.set(attr, value);
-		}
-
-		this.conf = newConf;
-		return true;
-	}
+        protected boolean loadConfiguration(Map<String, String> configuration) {
+            Configuration newConf = new Configuration(this.conf);
+            if (configuration == null)
+                return false;
+            for (Entry<String, String> entry : configuration.entrySet()) {
+                newConf.set(entry.getKey(), entry.getValue());
+            }
+    
+    
+            this.conf = newConf;
+            return true;
+        }
 
 	/**
 	 * Sets a Hadoop configuration property value

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/9d78cbdb/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
index 7f3cbfb..bcf5944 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/launch/HadoopLocationWizard.java
@@ -684,6 +684,8 @@ public class HadoopLocationWizard extends WizardPage {
 								public void run() {
 									try {
 										location = AbstractHadoopCluster.createCluster(selection);
+										location.setConfPropValue(ConfProp.PI_HADOOP_VERSION, selection);
+										location.setConfPropValue(ConfProp.PI_LOCATION_NAME, "");
 										for (TabListener tab : mediator.tabs) {
 											tab.reloadData();
 										}
@@ -854,6 +856,7 @@ public class HadoopLocationWizard extends WizardPage {
 			notifyChange(ConfProp.PI_SOCKS_PROXY_ENABLE,location.getConfPropValue(ConfProp.PI_SOCKS_PROXY_ENABLE));
 			notifyChange(ConfProp.PI_SOCKS_PROXY_HOST,location.getConfPropValue(ConfProp.PI_SOCKS_PROXY_HOST));
 			notifyChange(ConfProp.PI_SOCKS_PROXY_PORT,location.getConfPropValue(ConfProp.PI_SOCKS_PROXY_PORT));
+			notifyChange(ConfProp.PI_LOCATION_NAME,location.getConfPropValue(ConfProp.PI_LOCATION_NAME));
 		}
 
 		public void notifyChange(ConfProp prop, String propValue) {
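
Taken together with the wizard change above, the fix closes the loop: the wizard records the chosen version on the newly created location, and createCluster(File) reads it back when the Cluster view reloads saved locations. A rough sketch of that round trip (flow is illustrative; persisting the location to XML happens in the existing save path):

    // In the wizard, after the user picks a Hadoop version (see diff above):
    AbstractHadoopCluster location = AbstractHadoopCluster.createCluster(selection);
    location.setConfPropValue(ConfProp.PI_HADOOP_VERSION, selection);   // stored with the location
    location.setConfPropValue(ConfProp.PI_LOCATION_NAME, "");
    // ... the location is later written to its XML file by the existing persistence code ...
    // On the next Cluster view load, createCluster(File) finds PI_HADOOP_VERSION in that
    // file and instantiates the matching Hadoop 1.x or 2.x HadoopCluster implementation.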