You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2011/06/10 00:34:32 UTC
svn commit: r1134119 - in /hbase/trunk: CHANGES.txt
src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
Author: stack
Date: Thu Jun 9 22:34:32 2011
New Revision: 1134119
URL: http://svn.apache.org/viewvc?rev=1134119&view=rev
Log:
HBASE-3971 Compression.java uses ClassLoader.getSystemClassLoader() to load codec
Modified:
hbase/trunk/CHANGES.txt
hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1134119&r1=1134118&r2=1134119&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Thu Jun 9 22:34:32 2011
@@ -115,6 +115,8 @@ Release 0.91.0 - Unreleased
HBASE-3894 Thread contention over row locks set monitor (Dave Latham)
HBASE-3959 hadoop-snappy version in the pom.xml is incorrect
(Alejandro Abdelnur)
+ HBASE-3971 Compression.java uses ClassLoader.getSystemClassLoader()
+ to load codec (Alejandro Abdelnur)
IMPROVEMENTS
HBASE-3290 Max Compaction Size (Nicolas Spiegelberg via Stack)
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java?rev=1134119&r1=1134118&r2=1134119&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java Thu Jun 9 22:34:32 2011
@@ -71,6 +71,24 @@ public final class Compression {
}
/**
+ * Returns the classloader to load the Codec class from.
+ * @return
+ */
+ private static ClassLoader getClassLoaderForCodec() {
+ ClassLoader cl = Thread.currentThread().getContextClassLoader();
+ if (cl == null) {
+ cl = Compression.class.getClassLoader();
+ }
+ if (cl == null) {
+ cl = ClassLoader.getSystemClassLoader();
+ }
+ if (cl == null) {
+ throw new RuntimeException("A ClassLoader to load the Codec could not be determined");
+ }
+ return cl;
+ }
+
+ /**
* Compression algorithms. The ordinal of these cannot change or else you
* risk breaking all existing HFiles out there. Even the ones that are
* not compressed! (They use the NONE algorithm)
@@ -85,7 +103,7 @@ public final class Compression {
if (lzoCodec == null) {
try {
Class<?> externalCodec =
- ClassLoader.getSystemClassLoader().loadClass("com.hadoop.compression.lzo.LzoCodec");
+ getClassLoaderForCodec().loadClass("com.hadoop.compression.lzo.LzoCodec");
lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
new Configuration(conf));
} catch (ClassNotFoundException e) {
@@ -150,7 +168,7 @@ public final class Compression {
if (snappyCodec == null) {
try {
Class<?> externalCodec =
- ClassLoader.getSystemClassLoader().loadClass("org.apache.hadoop.io.compress.SnappyCodec");
+ getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec");
snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
conf);
} catch (ClassNotFoundException e) {