You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by rs...@apache.org on 2014/06/20 10:37:45 UTC
[6/6] git commit: HDT-61 : - Extracting hadoop home validation to
plugin - using hadoop home validator in NewProjectWizard and MapReduceNature
- setting version as part of preference
HDT-61 :
- Extracting hadoop home validation to plugin
- using hadoop home validator in NewProjectWizard and MapReduceNature
- setting version as part of preference
Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/bf1a4949
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/bf1a4949
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/bf1a4949
Branch: refs/heads/hadoop-eclipse-merge-development
Commit: bf1a4949564d7f78556d941dde1ba971fba02204
Parents: c308e97
Author: Rahul Sharma <rs...@apache.org>
Authored: Mon Jun 16 14:50:05 2014 +0530
Committer: Rahul Sharma <rs...@apache.org>
Committed: Tue Jun 17 10:11:07 2014 +0530
----------------------------------------------------------------------
org.apache.hdt.core/plugin.xml | 1 +
....apache.hadoop.eclipse.hadoopHomeReader.exsd | 126 +++++++++++++++++++
.../hdt/core/AbstractHadoopHomeReader.java | 46 +++++++
.../hdt/core/natures/MapReduceNature.java | 28 +----
org.apache.hdt.hadoop.release/plugin.xml | 8 ++
.../hdt/hadoop/release/HadoopHomeReader.java | 77 ++++++++++++
org.apache.hdt.hadoop2.release/plugin.xml | 7 ++
.../hdt/hadoop2/release/HadoopHomeReader.java | 101 +++++++++++++++
.../internal/mr/NewMapReduceProjectWizard.java | 82 ++++++++++--
.../ui/preferences/MapReducePreferencePage.java | 11 ++
.../hdt/ui/preferences/PreferenceConstants.java | 2 +
11 files changed, 454 insertions(+), 35 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.core/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/plugin.xml b/org.apache.hdt.core/plugin.xml
index 94f3d49..86ca57b 100644
--- a/org.apache.hdt.core/plugin.xml
+++ b/org.apache.hdt.core/plugin.xml
@@ -20,6 +20,7 @@
<extension-point id="org.apache.hdt.core.hdfsClient" name="Apache Hadoop HDFS Client" schema="schema/org.apache.hadoop.eclipse.hdfsclient.exsd"/>
<extension-point id="org.apache.hdt.core.zookeeperClient" name="Apache Hadoop ZooKeeper Client" schema="schema/org.apache.hadoop.eclipse.zookeeperClient.exsd"/>
<extension-point id="org.apache.hdt.core.hadoopCluster" name="Apache Hadoop Cluster" schema="schema/org.apache.hadoop.eclipse.hadoopCluster.exsd"/>
+ <extension-point id="org.apache.hdt.core.hadoopHomeReader" name="Apache Hadoop Home Location Reader" schema="schema/org.apache.hadoop.eclipse.hadoopHomeReader.exsd"/>
<extension
id="org.apache.hadoop.hdfs.filesystem"
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.core/schema/org.apache.hadoop.eclipse.hadoopHomeReader.exsd
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/schema/org.apache.hadoop.eclipse.hadoopHomeReader.exsd b/org.apache.hdt.core/schema/org.apache.hadoop.eclipse.hadoopHomeReader.exsd
new file mode 100644
index 0000000..bfd8941
--- /dev/null
+++ b/org.apache.hdt.core/schema/org.apache.hadoop.eclipse.hadoopHomeReader.exsd
@@ -0,0 +1,126 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<schema targetNamespace="org.apache.hdt.core" xmlns="http://www.w3.org/2001/XMLSchema">
+<annotation>
+ <appinfo>
+ <meta.schema plugin="org.apache.hdt.core" id="org.apache.hdt.core.hadoopHomeReader" name="Apache Hadoop Home Location Reader"/>
+ </appinfo>
+ <documentation>
+ [Enter description of this extension point.]
+ </documentation>
+ </annotation>
+
+ <element name="extension">
+ <annotation>
+ <appinfo>
+ <meta.element />
+ </appinfo>
+ </annotation>
+ <complexType>
+ <choice>
+ <sequence>
+ <element ref="hadoopHomeReader" minOccurs="0" maxOccurs="unbounded"/>
+ </sequence>
+ </choice>
+ <attribute name="point" type="string" use="required">
+ <annotation>
+ <documentation>
+
+ </documentation>
+ </annotation>
+ </attribute>
+ <attribute name="id" type="string">
+ <annotation>
+ <documentation>
+
+ </documentation>
+ </annotation>
+ </attribute>
+ <attribute name="name" type="string">
+ <annotation>
+ <documentation>
+
+ </documentation>
+ <appinfo>
+ <meta.attribute translatable="true"/>
+ </appinfo>
+ </annotation>
+ </attribute>
+ </complexType>
+ </element>
+
+ <element name="hadoopHomeReader">
+ <complexType>
+ <attribute name="class" type="string" use="required">
+ <annotation>
+ <documentation>
+
+ </documentation>
+ <appinfo>
+ <meta.attribute kind="java" basedOn="org.apache.hdt.core.AbstractHadoopHomeReader:"/>
+ </appinfo>
+ </annotation>
+ </attribute>
+ <attribute name="protocolVersion" type="string" use="required">
+ <annotation>
+ <documentation>
+
+ </documentation>
+ </annotation>
+ </attribute>
+ </complexType>
+ </element>
+
+ <annotation>
+ <appinfo>
+ <meta.section type="since"/>
+ </appinfo>
+ <documentation>
+ [Enter the first release in which this extension point appears.]
+ </documentation>
+ </annotation>
+
+ <annotation>
+ <appinfo>
+ <meta.section type="examples"/>
+ </appinfo>
+ <documentation>
+ [Enter extension point usage example here.]
+ </documentation>
+ </annotation>
+
+ <annotation>
+ <appinfo>
+ <meta.section type="apiinfo"/>
+ </appinfo>
+ <documentation>
+ [Enter API information here.]
+ </documentation>
+ </annotation>
+
+ <annotation>
+ <appinfo>
+ <meta.section type="implementation"/>
+ </appinfo>
+ <documentation>
+ [Enter information about supplied implementation of this extension point.]
+ </documentation>
+ </annotation>
+
+
+</schema>
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.core/src/org/apache/hdt/core/AbstractHadoopHomeReader.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/AbstractHadoopHomeReader.java b/org.apache.hdt.core/src/org/apache/hdt/core/AbstractHadoopHomeReader.java
new file mode 100644
index 0000000..aa61296
--- /dev/null
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/AbstractHadoopHomeReader.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.core;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IConfigurationElement;
+import org.eclipse.core.runtime.Platform;
+import org.eclipse.core.runtime.Status;
+
+public abstract class AbstractHadoopHomeReader {
+ private static final Logger logger = Logger.getLogger(AbstractHadoopHomeReader.class);
+ public abstract boolean validateHadoopHome(File location);
+ public abstract List<File> getHadoopJars(File location);
+
+ public static AbstractHadoopHomeReader createReader(String hadoopVersion) throws CoreException {
+ logger.debug("Creating hadoop home reader");
+ IConfigurationElement[] elementsFor = Platform.getExtensionRegistry().getConfigurationElementsFor("org.apache.hdt.core.hadoopHomeReader");
+ for (IConfigurationElement configElement : elementsFor) {
+ String version = configElement.getAttribute("protocolVersion");
+ if (version.equalsIgnoreCase(hadoopVersion)) {
+ return (AbstractHadoopHomeReader)configElement.createExecutableExtension("class");
+ }
+ }
+ throw new CoreException(new Status(Status.ERROR,Activator.BUNDLE_ID,"No Reader found for hadoop version"+hadoopVersion));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.core/src/org/apache/hdt/core/natures/MapReduceNature.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/natures/MapReduceNature.java b/org.apache.hdt.core/src/org/apache/hdt/core/natures/MapReduceNature.java
index e93ee9a..d350def 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/natures/MapReduceNature.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/natures/MapReduceNature.java
@@ -19,13 +19,13 @@
package org.apache.hdt.core.natures;
import java.io.File;
-import java.io.FilenameFilter;
import java.net.URL;
-import java.util.ArrayList;
import java.util.Iterator;
+import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hdt.core.AbstractHadoopHomeReader;
import org.apache.hdt.core.Activator;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectNature;
@@ -60,12 +60,10 @@ public class MapReduceNature implements IProjectNature {
public void configure() throws CoreException {
String hadoopHomePath = project.getPersistentProperty(new QualifiedName(Activator.BUNDLE_ID, "hadoop.runtime.path"));
- File hadoopHome = new Path(hadoopHomePath).toFile();
- File hadoopLib = new File(hadoopHome, "lib");
-
- final ArrayList<File> coreJars = new ArrayList<File>();
- coreJars.addAll(getJarFiles(hadoopHome));
- coreJars.addAll(getJarFiles(hadoopLib));
+ String hadoopVersion = project.getPersistentProperty(new QualifiedName(Activator.BUNDLE_ID, "hadoop.version"));
+
+ AbstractHadoopHomeReader homeReader = AbstractHadoopHomeReader.createReader(hadoopVersion);
+ final List<File> coreJars = homeReader.getHadoopJars(new Path(hadoopHomePath).toFile());
// Add Hadoop libraries onto classpath
IJavaProject javaProject = JavaCore.create(getProject());
@@ -96,20 +94,6 @@ public class MapReduceNature implements IProjectNature {
}
}
- private ArrayList<File> getJarFiles(File hadoopHome) {
- FilenameFilter jarFileFilter = new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.endsWith(".jar");
- }
- };
- final ArrayList<File> jars = new ArrayList<File>();
- for (String hadopCoreLibFileName : hadoopHome.list(jarFileFilter)) {
- jars.add(new File(hadoopHome, hadopCoreLibFileName));
- }
- return jars;
- }
-
/**
* Deconfigure a project from MapReduce status. Currently unimplemented.
*/
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.hadoop.release/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/plugin.xml b/org.apache.hdt.hadoop.release/plugin.xml
index 476bdcd..62cb794 100644
--- a/org.apache.hdt.hadoop.release/plugin.xml
+++ b/org.apache.hdt.hadoop.release/plugin.xml
@@ -39,5 +39,13 @@
protocolVersion="1.1">
</hadoopCluster>
</extension>
+ <extension
+ point="org.apache.hdt.core.hadoopHomeReader">
+ <hadoopHomeReader
+ class="org.apache.hdt.hadoop.release.HadoopHomeReader"
+ protocolVersion="1.1">
+ </hadoopHomeReader>
+ </extension>
+
</plugin>
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopHomeReader.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopHomeReader.java b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopHomeReader.java
new file mode 100644
index 0000000..ef0952d
--- /dev/null
+++ b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopHomeReader.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hdt.hadoop.release;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hdt.core.AbstractHadoopHomeReader;
+import org.eclipse.core.runtime.Path;
+
+public class HadoopHomeReader extends AbstractHadoopHomeReader {
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.apache.hdt.core.AbstractHadoopHomeReader#validateHadoopHome(java.
+ * io.File)
+ */
+ @Override
+ public boolean validateHadoopHome(File location) {
+ FilenameFilter gotHadoopJar = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return (name.startsWith("hadoop") && name.endsWith(".jar") && (name.indexOf("test") == -1) && (name.indexOf("examples") == -1));
+ }
+ };
+ return location.exists() && (location.list(gotHadoopJar).length > 0);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.apache.hdt.core.AbstractHadoopHomeReader#getHadoopJars(java.io.File)
+ */
+ @Override
+ public List<File> getHadoopJars(File hadoopHome) {
+ File hadoopLib = new File(hadoopHome, "lib");
+
+ final ArrayList<File> coreJars = new ArrayList<File>();
+ coreJars.addAll(getJarFiles(hadoopHome));
+ coreJars.addAll(getJarFiles(hadoopLib));
+ return coreJars;
+ }
+
+ private ArrayList<File> getJarFiles(File hadoopHome) {
+ FilenameFilter jarFileFilter = new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.endsWith(".jar");
+ }
+ };
+ final ArrayList<File> jars = new ArrayList<File>();
+ for (String hadopCoreLibFileName : hadoopHome.list(jarFileFilter)) {
+ jars.add(new File(hadoopHome, hadopCoreLibFileName));
+ }
+ return jars;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.hadoop2.release/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/plugin.xml b/org.apache.hdt.hadoop2.release/plugin.xml
index b200aca..2b14915 100644
--- a/org.apache.hdt.hadoop2.release/plugin.xml
+++ b/org.apache.hdt.hadoop2.release/plugin.xml
@@ -32,4 +32,11 @@
protocolVersion="2.2">
</hdfsClient>
</extension>
+ <extension
+ point="org.apache.hdt.core.hadoopHomeReader">
+ <hadoopHomeReader
+ class="org.apache.hdt.hadoop2.release.HadoopHomeReader"
+ protocolVersion="2.2">
+ </hadoopHomeReader>
+ </extension>
</plugin>
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopHomeReader.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopHomeReader.java b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopHomeReader.java
new file mode 100644
index 0000000..a45086c
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HadoopHomeReader.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hdt.hadoop2.release;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hdt.core.AbstractHadoopHomeReader;
+
+public class HadoopHomeReader extends AbstractHadoopHomeReader {
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.apache.hdt.core.AbstractHadoopHomeReader#validateHadoopHome(java.
+ * io.File)
+ */
+ @Override
+ public boolean validateHadoopHome(File location) {
+ File hadoopBin = new File(location, "bin");
+ File hadoopSBIn = new File(location, "sbin");
+ FilenameFilter gotHadoopYarn = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return (name.indexOf("yarn") != -1);
+ }
+ };
+ return hadoopBin.exists() && (hadoopBin.list(gotHadoopYarn).length > 0)
+ && hadoopSBIn.exists() && (hadoopSBIn.list(gotHadoopYarn).length > 0);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.apache.hdt.core.AbstractHadoopHomeReader#getHadoopJars(java.io.File)
+ */
+ @Override
+ public List<File> getHadoopJars(File hadoopHome) {
+ File mrCommonHome = FileUtils.getFile(hadoopHome, "share","hadoop","common");
+ File mrCommonLib = FileUtils.getFile(mrCommonHome,"lib");
+ File hdfsHome = FileUtils.getFile(hadoopHome, "share","hadoop","hdfs");
+ File hdfsLib = FileUtils.getFile(hdfsHome,"lib");
+ File yarnHome = FileUtils.getFile(hadoopHome, "share","hadoop","yarn");
+ File yarnLib = FileUtils.getFile(yarnHome,"lib");
+ File mrHome = FileUtils.getFile(hadoopHome, "share","hadoop","mapreduce");
+ File mrLib = FileUtils.getFile(mrHome,"lib");
+
+ FilenameFilter jarFileFilter = new FilenameFilter() {
+ Set<String> selectedFileName= new HashSet<String>();
+ @Override
+ public boolean accept(File dir, String name) {
+ boolean accept = name.endsWith(".jar")
+ && !selectedFileName.contains(name);
+ if(accept){
+ selectedFileName.add(name);
+ }
+ return accept;
+ }
+ };
+ final ArrayList<File> coreJars = new ArrayList<File>();
+ coreJars.addAll(getJarFiles(mrCommonHome,jarFileFilter));
+ coreJars.addAll(getJarFiles(mrCommonLib,jarFileFilter));
+ coreJars.addAll(getJarFiles(hdfsHome,jarFileFilter));
+ coreJars.addAll(getJarFiles(hdfsLib,jarFileFilter));
+ coreJars.addAll(getJarFiles(yarnHome,jarFileFilter));
+ coreJars.addAll(getJarFiles(yarnLib,jarFileFilter));
+ coreJars.addAll(getJarFiles(mrHome,jarFileFilter));
+ coreJars.addAll(getJarFiles(mrLib,jarFileFilter));
+ return coreJars;
+ }
+
+ private ArrayList<File> getJarFiles(File hadoopHome, FilenameFilter jarFileFilter) {
+ final ArrayList<File> jars = new ArrayList<File>();
+ for (String hadopCoreLibFileName : hadoopHome.list(jarFileFilter)) {
+ jars.add(new File(hadoopHome, hadopCoreLibFileName));
+ }
+ return jars;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewMapReduceProjectWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewMapReduceProjectWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewMapReduceProjectWizard.java
index 3963828..4b88403 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewMapReduceProjectWizard.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/internal/mr/NewMapReduceProjectWizard.java
@@ -18,12 +18,12 @@
package org.apache.hdt.ui.internal.mr;
-import java.io.File;
-import java.io.FilenameFilter;
import java.lang.reflect.InvocationTargetException;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hdt.core.AbstractHadoopHomeReader;
+import org.apache.hdt.core.HadoopVersion;
import org.apache.hdt.core.natures.MapReduceNature;
import org.apache.hdt.ui.Activator;
import org.apache.hdt.ui.ImageLibrary;
@@ -55,10 +55,14 @@ import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.DirectoryDialog;
+import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
+import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
+import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
@@ -132,9 +136,13 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
}
static class HadoopFirstPage extends WizardNewProjectCreationPage implements SelectionListener {
- public HadoopFirstPage() {
+ public HadoopFirstPage() throws CoreException {
super("New Hadoop Project");
setImageDescriptor(ImageLibrary.get("wizard.mapreduce.project.new"));
+ String prefVersion = Activator.getDefault().getPreferenceStore().getString(PreferenceConstants.P_VERSION);
+ prefVersion = prefVersion != null && !prefVersion.isEmpty() ? prefVersion :
+ HadoopVersion.Version1.getDisplayName();
+ homeReader = AbstractHadoopHomeReader.createReader(prefVersion);
}
private Link openPreferences;
@@ -151,6 +159,12 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
public String currentPath;
+ AbstractHadoopHomeReader homeReader;
+
+ private Combo hadoopVersion;
+
+ private String hadoopVersionText;
+
// private Button generateDriver;
@Override
@@ -204,6 +218,47 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
browse.setEnabled(false);
browse.addSelectionListener(this);
+ /*
+ * HDFS version
+ */
+ {
+ Label label = new Label(group, SWT.NONE);
+ label.setText("&Hadoop Version:");
+ Combo options = new Combo(group, SWT.SINGLE | SWT.BORDER | SWT.READ_ONLY);
+ options.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+ for (HadoopVersion ver : HadoopVersion.values()) {
+ options.add(ver.getDisplayName());
+ }
+ options.addListener(SWT.Selection, new Listener() {
+ public void handleEvent(Event e) {
+ try {
+ if (!hadoopVersionText.equalsIgnoreCase(hadoopVersion.getText())) {
+ homeReader = AbstractHadoopHomeReader.createReader(hadoopVersion.getText());
+ hadoopVersionText = hadoopVersion.getText();
+ getContainer().updateButtons();
+ }
+ } catch (CoreException e1) {
+ e1.printStackTrace();
+ }
+ }
+
+ });
+
+ hadoopVersion = options;
+ if (hadoopVersionText == null || hadoopVersionText.isEmpty())
+ hadoopVersionText = HadoopVersion.Version1.getDisplayName();
+
+ int pos = 0;
+ for (String item : options.getItems()) {
+ if (item.equalsIgnoreCase(hadoopVersionText)) {
+ options.select(pos);
+ break;
+ }
+ pos++;
+ }
+ options.setEnabled(false);
+ }
+
projectHadoop.addSelectionListener(this);
workspaceHadoop.addSelectionListener(this);
@@ -230,24 +285,18 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
}
private boolean validateHadoopLocation() {
- FilenameFilter gotHadoopJar = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return (name.startsWith("hadoop") && name.endsWith(".jar") && (name.indexOf("test") == -1) && (name.indexOf("examples") == -1));
- }
- };
-
if (workspaceHadoop.getSelection()) {
this.currentPath = path;
- return new Path(path).toFile().exists() && (new Path(path).toFile().list(gotHadoopJar).length > 0);
+ return homeReader.validateHadoopHome(new Path(path).toFile());
} else {
this.currentPath = location.getText();
- File file = new Path(location.getText()).toFile();
- return file.exists() && (new Path(location.getText()).toFile().list(gotHadoopJar).length > 0);
+ return homeReader.validateHadoopHome(new Path(location.getText()).toFile());
}
}
private void updateHadoopDirLabelFromPreferences() {
path = Activator.getDefault().getPreferenceStore().getString(PreferenceConstants.P_PATH);
+ hadoopVersionText = Activator.getDefault().getPreferenceStore().getString(PreferenceConstants.P_VERSION);
if ((path != null) && (path.length() > 0)) {
workspaceHadoop.setText("Use default Hadoop");
@@ -288,9 +337,11 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
} else if (projectHadoop.getSelection()) {
location.setEnabled(true);
browse.setEnabled(true);
+ hadoopVersion.setEnabled(true);
} else {
location.setEnabled(false);
browse.setEnabled(false);
+ hadoopVersion.setEnabled(false);
}
getContainer().updateButtons();
@@ -304,7 +355,11 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
* JavaProjectWizardSecondPage(firstPage) );
*/
- firstPage = new HadoopFirstPage();
+ try {
+ firstPage = new HadoopFirstPage();
+ } catch (CoreException e) {
+ e.printStackTrace();
+ }
javaPage = new NewJavaProjectWizardPage(ResourcesPlugin.getWorkspace().getRoot(), firstPage);
// newDriverPage = new NewDriverWizardPage(false);
// newDriverPage.setPageComplete(false); // ensure finish button
@@ -345,6 +400,7 @@ public class NewMapReduceProjectWizard extends Wizard implements INewWizard, IEx
description.setNatureIds(natures);
project.setPersistentProperty(new QualifiedName(Activator.PLUGIN_ID, "hadoop.runtime.path"), firstPage.currentPath);
+ project.setPersistentProperty(new QualifiedName(Activator.PLUGIN_ID, "hadoop.version"), firstPage.hadoopVersionText);
project.setDescription(description, new NullProgressMonitor());
String[] natureIds = project.getDescription().getNatureIds();
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
index b653b10..b711f91 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/MapReducePreferencePage.java
@@ -17,7 +17,9 @@
*/
package org.apache.hdt.ui.preferences;
+import org.apache.hdt.core.HadoopVersion;
import org.apache.hdt.ui.Activator;
+import org.eclipse.jface.preference.ComboFieldEditor;
import org.eclipse.jface.preference.DirectoryFieldEditor;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.ui.IWorkbench;
@@ -54,6 +56,15 @@ public class MapReducePreferencePage extends FieldEditorPreferencePage
public void createFieldEditors() {
addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
"&Hadoop installation directory:", getFieldEditorParent()));
+ HadoopVersion[] versions = HadoopVersion.values();
+ String[][] values= new String[versions.length][2];
+ int pos=0;
+ for(HadoopVersion ver:versions){
+ values[pos][0]=values[pos][1]=ver.getDisplayName();
+ pos++;
+ }
+ addField(new ComboFieldEditor(PreferenceConstants.P_VERSION,
+ "&Hadoop Version:",values,getFieldEditorParent()));
}
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/bf1a4949/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/PreferenceConstants.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/PreferenceConstants.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/PreferenceConstants.java
index 4efcbdd..b0bfa48 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/PreferenceConstants.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/preferences/PreferenceConstants.java
@@ -24,6 +24,8 @@ package org.apache.hdt.ui.preferences;
public class PreferenceConstants {
public static final String P_PATH = "pathPreference";
+
+ public static final String P_VERSION = "versionPreference";
// public static final String P_BOOLEAN = "booleanPreference";
//