You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/05/03 22:47:52 UTC
svn commit: r399426 - in /lucene/hadoop/trunk: ./ bin/
src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/io/
src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/util/
Author: cutting
Date: Wed May 3 13:47:47 2006
New Revision: 399426
URL: http://svn.apache.org/viewcvs?rev=399426&view=rev
Log:
HADOOP-189. Fix MapReduce in standalone configuration to correctly handle job jar files that contain a lib directory with nested jar files.
Added:
lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/bin/hadoop
lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/CHANGES.txt?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed May 3 13:47:47 2006
@@ -161,6 +161,10 @@
42. HADOOP-184. Re-structure some test code to better support testing
on a cluster. (Mahadev Konar via cutting)
+43. HADOOP-189. Fix MapReduce in standalone configuration to
+ correctly handle job jar files that contain a lib directory with
+ nested jar files. (cutting)
+
Release 0.1.1 - 2006-04-08
Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Wed May 3 13:47:47 2006
@@ -135,14 +135,7 @@
elif [ "$COMMAND" = "job" ] ; then
CLASS=org.apache.hadoop.mapred.JobClient
elif [ "$COMMAND" = "jar" ] ; then
- JAR="$1"
- shift
- CLASS=`"$0" org.apache.hadoop.util.PrintJarMainClass "$JAR"`
- if [ $? != 0 ]; then
- echo "Error: Could not find main class in jar file $JAR"
- exit 1
- fi
- CLASSPATH=${CLASSPATH}:${JAR}
+ CLASS=org.apache.hadoop.util.RunJar
else
CLASS=$COMMAND
fi
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Wed May 3 13:47:47 2006
@@ -241,7 +241,7 @@
if (valueString == null)
return defaultValue;
try {
- return Class.forName(valueString);
+ return classLoader.loadClass(valueString);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Wed May 3 13:47:47 2006
@@ -88,7 +88,8 @@
declaredClass = (Class)PRIMITIVE_NAMES.get(className);
if (declaredClass == null) {
try {
- declaredClass = Class.forName(className);
+ declaredClass =
+ Thread.currentThread().getContextClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e.toString());
}
@@ -170,7 +171,8 @@
Class declaredClass = (Class)PRIMITIVE_NAMES.get(className);
if (declaredClass == null) {
try {
- declaredClass = Class.forName(className);
+ declaredClass =
+ Thread.currentThread().getContextClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e.toString());
}
@@ -215,7 +217,8 @@
} else { // Writable
Class instanceClass = null;
try {
- instanceClass = Class.forName(UTF8.readString(in));
+ instanceClass = Thread.currentThread().getContextClassLoader()
+ .loadClass(UTF8.readString(in));
} catch (ClassNotFoundException e) {
throw new RuntimeException(e.toString());
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java Wed May 3 13:47:47 2006
@@ -62,7 +62,7 @@
if (writableClass != null)
return writableClass;
try {
- return Class.forName(name);
+ return Thread.currentThread().getContextClassLoader().loadClass(name);
} catch (ClassNotFoundException e) {
throw new IOException(e.toString());
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=399426&r1=399425&r2=399426&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Wed May 3 13:47:47 2006
@@ -18,6 +18,7 @@
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.LogFormatter;
import org.apache.hadoop.fs.*;
+import org.apache.hadoop.util.*;
import java.io.*;
import java.util.jar.*;
@@ -78,7 +79,7 @@
String jar = conf.getJar();
if (jar != null) { // if jar exists, unpack it into workDir
- unJar(new File(jar), workDir);
+ RunJar.unJar(new File(jar), workDir);
File[] libs = new File(workDir, "lib").listFiles();
if (libs != null) {
for (int i = 0; i < libs.length; i++) {
@@ -220,37 +221,6 @@
}
return text;
- }
-
- private void unJar(File jarFile, File toDir) throws IOException {
- JarFile jar = new JarFile(jarFile);
- try {
- Enumeration entries = jar.entries();
- while (entries.hasMoreElements()) {
- JarEntry entry = (JarEntry)entries.nextElement();
- if (!entry.isDirectory()) {
- InputStream in = jar.getInputStream(entry);
- try {
- File file = new File(toDir, entry.getName());
- file.getParentFile().mkdirs();
- OutputStream out = new FileOutputStream(file);
- try {
- byte[] buffer = new byte[8192];
- int i;
- while ((i = in.read(buffer)) != -1) {
- out.write(buffer, 0, i);
- }
- } finally {
- out.close();
- }
- } finally {
- in.close();
- }
- }
- }
- } finally {
- jar.close();
- }
}
/**
Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java?rev=399426&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java Wed May 3 13:47:47 2006
@@ -0,0 +1,135 @@
+/**
+ * Copyright 2006 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.util.jar.*;
+import java.lang.reflect.*;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.io.*;
+import java.util.*;
+
+import org.apache.hadoop.fs.FileUtil;
+
+/** Run a Hadoop job jar. */
+public class RunJar {
+
+ /** Unpack a jar file into a directory. */
+ public static void unJar(File jarFile, File toDir) throws IOException {
+ JarFile jar = new JarFile(jarFile);
+ try {
+ Enumeration entries = jar.entries();
+ while (entries.hasMoreElements()) {
+ JarEntry entry = (JarEntry)entries.nextElement();
+ if (!entry.isDirectory()) {
+ InputStream in = jar.getInputStream(entry);
+ try {
+ File file = new File(toDir, entry.getName());
+ file.getParentFile().mkdirs();
+ OutputStream out = new FileOutputStream(file);
+ try {
+ byte[] buffer = new byte[8192];
+ int i;
+ while ((i = in.read(buffer)) != -1) {
+ out.write(buffer, 0, i);
+ }
+ } finally {
+ out.close();
+ }
+ } finally {
+ in.close();
+ }
+ }
+ }
+ } finally {
+ jar.close();
+ }
+ }
+
+ /** Run a Hadoop job jar. If the main class is not in the jar's manifest,
+ * then it must be provided on the command line. */
+ public static void main(String[] args) throws Throwable {
+ String usage = "RunJar jarFile [mainClass] args...";
+
+ if (args.length < 1) {
+ System.err.println(usage);
+ System.exit(-1);
+ }
+
+ int firstArg = 0;
+ String fileName = args[firstArg++];
+ File file = new File(fileName);
+ String mainClassName = null;
+
+ JarFile jarFile = new JarFile(fileName);
+ Manifest manifest = jarFile.getManifest();
+ if (manifest != null) {
+ mainClassName = manifest.getMainAttributes().getValue("Main-Class");
+ }
+ jarFile.close();
+
+ if (mainClassName == null) {
+ if (args.length < 2) {
+ System.err.println(usage);
+ System.exit(-1);
+ }
+ mainClassName = args[firstArg++];
+ }
+ mainClassName = mainClassName.replaceAll("/", ".");
+
+ final File workDir = File.createTempFile("hadoop-unjar","");
+ workDir.delete();
+ workDir.mkdirs();
+
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+ public void run() {
+ try {
+ FileUtil.fullyDelete(workDir);
+ } catch (IOException e) {
+ }
+ }
+ });
+
+ unJar(file, workDir);
+
+ ArrayList classPath = new ArrayList();
+ File[] libs = new File(workDir, "lib").listFiles();
+ if (libs != null) {
+ for (int i = 0; i < libs.length; i++) {
+ classPath.add(libs[i].toURL());
+ }
+ }
+ classPath.add(new File(workDir, "classes/").toURL());
+ classPath.add(new File(workDir+"/").toURL());
+ ClassLoader loader =
+ new URLClassLoader((URL[])classPath.toArray(new URL[0]));
+
+ Thread.currentThread().setContextClassLoader(loader);
+ Class mainClass = loader.loadClass(mainClassName);
+ Method main = mainClass.getMethod("main", new Class[] {
+ Array.newInstance(String.class, 0).getClass()
+ });
+ String[] newArgs = (String[])Arrays.asList(args)
+ .subList(firstArg, args.length).toArray(new String[0]);
+ try {
+ main.invoke(null, new Object[] { newArgs });
+ } catch (InvocationTargetException e) {
+ throw e.getTargetException();
+ }
+ }
+
+}