You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by rs...@apache.org on 2014/06/26 10:36:29 UTC
[06/27] git commit: HDT-49 : Job does not launch on cluster - Generated mapred-site.xml so that jobs can run on cluster - using mapred.job.tracker instead of deprecated mapreduce.jobtracker.address
HDT-49 : Job does not launch on cluster
- Generated mapred-site.xml so that jobs can run on cluster
- using mapred.job.tracker instead of deprecated mapreduce.jobtracker.address
Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/0e9e729a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/0e9e729a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/0e9e729a
Branch: refs/heads/hadoop-eclipse-merge
Commit: 0e9e729a283806c6d8772c3958a18ac767c7143d
Parents: 29467b5
Author: Rahul Sharma <rs...@apache.org>
Authored: Thu May 1 15:25:15 2014 +0530
Committer: Rahul Sharma <rs...@apache.org>
Committed: Fri May 9 09:58:17 2014 +0530
----------------------------------------------------------------------
.../hdt/core/launch/AbstractHadoopCluster.java | 2 +-
.../org/apache/hdt/core/launch/ConfProp.java | 2 +-
.../hdt/hadoop/release/HadoopCluster.java | 25 ++++++++++++--------
3 files changed, 17 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/0e9e729a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
index e5f7dd4..782a89c 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
@@ -63,7 +63,7 @@ public abstract class AbstractHadoopCluster {
abstract public String getState();
abstract public boolean loadFromXML(File file) throws IOException;
-
+
public static AbstractHadoopCluster createCluster(File file) throws CoreException, IOException {
AbstractHadoopCluster hadoopCluster = createCluster();
hadoopCluster.loadFromXML(file);
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/0e9e729a/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java b/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
index 538eb75..c7c64f9 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
@@ -81,7 +81,7 @@ public enum ConfProp {
* Property name for naming the job tracker (URI). This property is related
* to {@link #PI_MASTER_HOST_NAME}
*/
- JOB_TRACKER_URI(false, "mapreduce.jobtracker.address", "localhost:50020"),
+ JOB_TRACKER_URI(false, "mapred.job.tracker", "localhost:50020"),
/**
* Property name for naming the default file system (URI).
http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/0e9e729a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
index daaf990..466739b 100644
--- a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
+++ b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
@@ -18,7 +18,9 @@
package org.apache.hdt.hadoop.release;
+import java.io.BufferedInputStream;
import java.io.File;
+import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collection;
@@ -49,6 +51,7 @@ import org.apache.hdt.core.launch.AbstractHadoopCluster;
import org.apache.hdt.core.launch.IHadoopJob;
import org.apache.hdt.core.launch.IJarModule;
import org.apache.hdt.core.launch.IJobListener;
+import org.eclipse.core.internal.utils.FileUtil;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
@@ -236,9 +239,7 @@ public class HadoopCluster extends AbstractHadoopCluster {
* @throws ParserConfigurationException
*/
public HadoopCluster(File file) throws ParserConfigurationException, SAXException, IOException {
-
- this.conf = new Configuration();
- this.addPluginConfigDefaultProperties();
+ this();
this.loadFromXML(file);
}
@@ -422,8 +423,8 @@ public class HadoopCluster extends AbstractHadoopCluster {
* the property value
*/
public void setConfProp(ConfProp prop, String propValue) {
- assert propValue != null;
- conf.set(prop.name, propValue);
+ if (propValue != null)
+ conf.set(prop.name, propValue);
}
/**
@@ -472,8 +473,7 @@ public class HadoopCluster extends AbstractHadoopCluster {
*/
private void addPluginConfigDefaultProperties() {
for (ConfProp prop : ConfProp.values()) {
- if (conf.get(prop.name) == null)
- conf.set(prop.name, prop.defVal);
+ conf.set(prop.name, prop.defVal);
}
}
@@ -550,14 +550,19 @@ public class HadoopCluster extends AbstractHadoopCluster {
JobConf conf = new JobConf(this.conf);
conf.setJar(jarFilePath);
// Write it to the disk file
- File confFile = new File(confDir, "core-site.xml");
- FileOutputStream fos = new FileOutputStream(confFile);
+ File coreSiteFile = new File(confDir, "core-site.xml");
+ File mapredSiteFile = new File(confDir, "mapred-site.xml");
+ FileOutputStream fos = new FileOutputStream(coreSiteFile);
+ FileInputStream fis = null;
try {
conf.writeXml(fos);
fos.close();
- fos = null;
+ fos = new FileOutputStream(mapredSiteFile);
+ fis = new FileInputStream(coreSiteFile);
+ IOUtils.copyBytes(new BufferedInputStream(fis), fos, 4096);
} finally {
IOUtils.closeStream(fos);
+ IOUtils.closeStream(fis);
}
}