You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/01/10 18:46:57 UTC

svn commit: r494905 [3/3] - in /lucene/hadoop/trunk: ./ src/java/org/apache/hadoop/io/compress/ src/java/org/apache/hadoop/io/compress/lzo/ src/native/ src/native/src/ src/native/src/org/apache/hadoop/io/compress/lzo/ src/native/src/org/apache/hadoop/i...

Modified: lucene/hadoop/trunk/src/native/configure.ac
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/configure.ac?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/configure.ac (original)
+++ lucene/hadoop/trunk/src/native/configure.ac Wed Jan 10 09:46:54 2007
@@ -64,6 +64,9 @@
 dnl Check for '-lz'
 AC_CHECK_LIB([z], [deflate])
 
+dnl Check for '-llzo2'
+AC_CHECK_LIB([lzo2], [lzo_init])
+
 # Checks for header files.
 dnl Check for Ansi C headers
 AC_HEADER_STDC
@@ -89,6 +92,9 @@
 dnl Check for zlib headers
 AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
 
+dnl Check for lzo headers
+AC_CHECK_HEADERS([lzo/lzo1.h lzo/lzo1a.h lzo/lzo1b.h lzo/lzo1c.h lzo/lzo1f.h lzo/lzo1x.h lzo/lzo1y.h lzo/lzo1z.h lzo/lzo2a.h lzo/lzo_asm.h], AC_COMPUTE_NEEDED_DSO(lzo2,HADOOP_LZO_LIBRARY), AC_MSG_ERROR(lzo headers were not found... native-hadoop library needs lzo to build. Please install the requisite lzo development package.))
+
 # Checks for typedefs, structures, and compiler characteristics.
 AC_C_CONST
 
@@ -97,6 +103,7 @@
 
 AC_CONFIG_FILES([Makefile
                  src/org/apache/hadoop/io/compress/zlib/Makefile
+                 src/org/apache/hadoop/io/compress/lzo/Makefile
                  lib/Makefile])
 AC_OUTPUT
 

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoCompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoCompressor.c?view=auto&rev=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoCompressor.c (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoCompressor.c Wed Jan 10 09:46:54 2007
@@ -0,0 +1,253 @@
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDIO_H
+  #include <stdio.h>
+#else
+  #error 'stdio.h not found'
+#endif  
+
+#if defined HAVE_STDLIB_H
+  #include <stdlib.h>
+#else
+  #error 'stdlib.h not found'
+#endif  
+
+#include "org_apache_hadoop_io_compress_lzo.h"
+
+// The lzo2 library-handle
+static void *liblzo2 = NULL;
+
+// The lzo 'compressors'
+typedef struct {
+  const char *function;           // The compression function
+  int wrkmem;                     // The 'working memory' needed
+  int compression_level;          // Compression level if required;
+                                  // else UNDEFINED_COMPRESSION_LEVEL
+} lzo_compressor;
+
+#define UNDEFINED_COMPRESSION_LEVEL -999
+
+// Table of the lzo compressors exposed to Java, indexed by the 'compressor'
+// ordinal passed into LzoCompressor_init()/compressBytesDirect() from the
+// Java side. NOTE(review): the /* N */ index comments are load-bearing for
+// that mapping - keep them (and the Java-side ordinals) in sync if entries
+// are added or removed.
+static lzo_compressor lzo_compressors[] = {
+  /** lzo1 compressors */
+  /* 0 */   {"lzo1_compress", LZO1_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 1 */   {"lzo1_99_compress", LZO1_99_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo1a compressors */
+  /* 2 */   {"lzo1a_compress", LZO1A_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 3 */   {"lzo1a_99_compress", LZO1A_99_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo1b compressors */
+  /* 4 */   {"lzo1b_compress", LZO1B_MEM_COMPRESS, LZO1B_DEFAULT_COMPRESSION}, 
+  /* 5 */   {"lzo1b_compress", LZO1B_MEM_COMPRESS, LZO1B_BEST_SPEED}, 
+  /* 6 */   {"lzo1b_compress", LZO1B_MEM_COMPRESS, LZO1B_BEST_COMPRESSION}, 
+  /* 7 */   {"lzo1b_1_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 8 */   {"lzo1b_2_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 9 */   {"lzo1b_3_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 10 */  {"lzo1b_4_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 11 */  {"lzo1b_5_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 12 */  {"lzo1b_6_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 13 */  {"lzo1b_7_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 14 */  {"lzo1b_8_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 15 */  {"lzo1b_9_compress", LZO1B_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 16 */  {"lzo1b_99_compress", LZO1B_99_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 17 */  {"lzo1b_999_compress", LZO1B_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  
+  /** lzo1c compressors */
+  /* 18 */  {"lzo1c_compress", LZO1C_MEM_COMPRESS, LZO1C_DEFAULT_COMPRESSION}, 
+  /* 19 */  {"lzo1c_compress", LZO1C_MEM_COMPRESS, LZO1C_BEST_SPEED}, 
+  /* 20 */  {"lzo1c_compress", LZO1C_MEM_COMPRESS, LZO1C_BEST_COMPRESSION}, 
+  /* 21 */  {"lzo1c_1_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 22 */  {"lzo1c_2_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 23 */  {"lzo1c_3_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 24 */  {"lzo1c_4_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 25 */  {"lzo1c_5_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 26 */  {"lzo1c_6_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 27 */  {"lzo1c_7_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 28 */  {"lzo1c_8_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 29 */  {"lzo1c_9_compress", LZO1C_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 30 */  {"lzo1c_99_compress", LZO1C_99_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  /* 31 */  {"lzo1c_999_compress", LZO1C_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL}, 
+  
+  /** lzo1f compressors */
+  /* 32 */  {"lzo1f_1_compress", LZO1F_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 33 */  {"lzo1f_999_compress", LZO1F_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo1x compressors */
+  /* 34 */  {"lzo1x_1_compress", LZO1X_1_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 35 */  {"lzo1x_11_compress", LZO1X_1_11_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 36 */  {"lzo1x_12_compress", LZO1X_1_12_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 37 */  {"lzo1x_15_compress", LZO1X_1_15_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 38 */  {"lzo1x_999_compress", LZO1X_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo1y compressors */
+  /* 39 */  {"lzo1y_1_compress", LZO1Y_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+  /* 40 */  {"lzo1y_999_compress", LZO1Y_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo1z compressors */
+  /* 41 */  {"lzo1z_999_compress", LZO1Z_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+
+  /** lzo2a compressors */
+  /* 42 */  {"lzo2a_999_compress", LZO2A_999_MEM_COMPRESS, UNDEFINED_COMPRESSION_LEVEL},
+};
+
+// The second lzo* compressor prototype - this really should be in lzoconf.h!
+// It mirrors lzo_compress_t but takes a trailing 'compression_level'; used
+// for table entries whose level is not UNDEFINED_COMPRESSION_LEVEL.
+typedef int
+(__LZO_CDECL *lzo_compress2_t)   ( const lzo_bytep src, lzo_uint  src_len,
+                                  lzo_bytep dst, lzo_uintp dst_len,
+                                  lzo_voidp wrkmem, int compression_level );
+
+// Cached jfieldIDs of the Java-side LzoCompressor fields. Resolved once in
+// initIDs() below and read/written by init() and compressBytesDirect().
+static jfieldID LzoCompressor_finish;
+static jfieldID LzoCompressor_finished;
+static jfieldID LzoCompressor_uncompressedDirectBuf;
+static jfieldID LzoCompressor_uncompressedDirectBufLen;
+static jfieldID LzoCompressor_compressedDirectBuf;
+static jfieldID LzoCompressor_directBufferSize;
+static jfieldID LzoCompressor_lzoCompressor;
+static jfieldID LzoCompressor_workingMemoryBufLen;
+static jfieldID LzoCompressor_workingMemoryBuf;
+
+// One-time native setup: dlopen()s liblzo2 and caches the jfieldIDs above.
+// Throws UnsatisfiedLinkError (and returns) if the library cannot be loaded.
+// NOTE(review): presumably called from the LzoCompressor class static
+// initializer on the Java side - confirm there.
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoCompressor_initIDs(
+	JNIEnv *env, jclass class
+	) {
+	// Load liblzo2.so
+	liblzo2 = dlopen(HADOOP_LZO_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+	if (!liblzo2) {
+		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load liblzo2.so!");
+	  return;
+	}
+    
+  LzoCompressor_finish = (*env)->GetFieldID(env, class, "finish", "Z");
+  LzoCompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
+  LzoCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
+                                                    "uncompressedDirectBuf", 
+                                                    "Ljava/nio/Buffer;");
+  LzoCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class, 
+                                            "uncompressedDirectBufLen", "I");
+  LzoCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
+                                                        "compressedDirectBuf",
+                                                        "Ljava/nio/Buffer;");
+  LzoCompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+                                            "directBufferSize", "I");
+  LzoCompressor_lzoCompressor = (*env)->GetFieldID(env, class, 
+                                          "lzoCompressor", "J");
+  LzoCompressor_workingMemoryBufLen = (*env)->GetFieldID(env, class,
+                                                "workingMemoryBufLen", "I");
+  LzoCompressor_workingMemoryBuf = (*env)->GetFieldID(env, class, 
+                                              "workingMemoryBuf", 
+                                              "Ljava/nio/Buffer;");
+}
+
+/**
+ * Per-instance setup for the requested compressor ordinal: initializes the
+ * lzo library via __lzo_init_v2, resolves the compressor function from
+ * liblzo2 and stores its address in the 'lzoCompressor' (J) field, and
+ * stores the required working-memory size in 'workingMemoryBufLen' (I).
+ * Throws InternalError (and returns) on any failure.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoCompressor_init(
+  JNIEnv *env, jobject this, jint compressor 
+  ) {
+  // Guard the Java-supplied ordinal before indexing the table.
+  if (compressor < 0 ||
+      compressor >= (jint)(sizeof(lzo_compressors) / sizeof(lzo_compressors[0]))) {
+    THROW(env, "java/lang/InternalError", "Invalid lzo compressor!");
+    return;
+  }
+  const char *lzo_compressor_function = lzo_compressors[compressor].function;
+ 
+  // Locate the requisite symbols from liblzo2.so
+  dlerror();                                 // Clear any existing error
+
+  // Initialize the lzo library 
+  void *lzo_init_func_ptr = NULL;
+  typedef int (__LZO_CDECL *lzo_init_t) (unsigned,int,int,int,int,int,int,int,int,int);
+  LOAD_DYNAMIC_SYMBOL(lzo_init_func_ptr, env, liblzo2, "__lzo_init_v2");
+  lzo_init_t lzo_init_function = (lzo_init_t)(lzo_init_func_ptr);
+  int rv = lzo_init_function(LZO_VERSION, (int)sizeof(short), (int)sizeof(int), 
+              (int)sizeof(long), (int)sizeof(lzo_uint32), (int)sizeof(lzo_uint), 
+              (int)lzo_sizeof_dict_t, (int)sizeof(char*), (int)sizeof(lzo_voidp),
+              (int)sizeof(lzo_callback_t));
+  if (rv != LZO_E_OK) {
+    // Was "Ljava/lang/InternalError": that is JNI *descriptor* syntax;
+    // class lookup takes '/'-separated names (as used elsewhere in this file).
+    THROW(env, "java/lang/InternalError", "Could not initialize lzo library!");
+    return;
+  }
+  
+  // Save the compressor-function into LzoCompressor_lzoCompressor
+  void *compressor_func_ptr = NULL;
+  LOAD_DYNAMIC_SYMBOL(compressor_func_ptr, env, liblzo2, lzo_compressor_function);
+  (*env)->SetLongField(env, this, LzoCompressor_lzoCompressor,
+                       JLONG(compressor_func_ptr));
+  
+  // Save the required working-memory size into LzoCompressor_workingMemoryBufLen
+  (*env)->SetIntField(env, this, LzoCompressor_workingMemoryBufLen,
+                      lzo_compressors[compressor].wrkmem);
+
+  return;
+}
+
+/**
+ * Compress the contents of 'uncompressedDirectBuf' into 'compressedDirectBuf'
+ * using the compressor function resolved by init(). Returns the number of
+ * compressed bytes, or 0 if any direct-buffer address is unavailable; throws
+ * InternalError if the lzo call returns a failure code.
+ */
+JNIEXPORT jint JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoCompressor_compressBytesDirect(
+  JNIEnv *env, jobject this, jint compressor 
+  ) {
+  const char *lzo_compressor_function = lzo_compressors[compressor].function;
+
+  // Get members of LzoCompressor
+  jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
+                                        LzoCompressor_uncompressedDirectBuf);
+  lzo_uint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+                                        LzoCompressor_uncompressedDirectBufLen);
+
+  jobject compressed_direct_buf = (*env)->GetObjectField(env, this, 
+                                        LzoCompressor_compressedDirectBuf);
+  lzo_uint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+                                        LzoCompressor_directBufferSize);
+
+  jobject working_memory_buf = (*env)->GetObjectField(env, this, 
+                                        LzoCompressor_workingMemoryBuf);
+
+  jlong lzo_compressor_funcptr = (*env)->GetLongField(env, this,
+                                        LzoCompressor_lzoCompressor);
+
+  // Get direct buffers; report 0 bytes produced if any address is unavailable.
+  lzo_bytep uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+                                        uncompressed_direct_buf);
+  if (uncompressed_bytes == 0) {
+    return (jint)0;
+  }
+
+  lzo_bytep compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+                                        compressed_direct_buf);
+  if (compressed_bytes == 0) {
+    return (jint)0;
+  }
+
+  lzo_voidp workmem = (*env)->GetDirectBufferAddress(env, working_memory_buf);
+  if (workmem == 0) {
+    return (jint)0;
+  }
+
+  // Compress: entries carrying a compression level use the extended
+  // lzo_compress2_t prototype, the rest the standard lzo_compress_t one.
+  lzo_uint no_compressed_bytes = compressed_direct_buf_len;
+  int rv = 0;
+  int compression_level = lzo_compressors[compressor].compression_level;
+  if (compression_level == UNDEFINED_COMPRESSION_LEVEL) {
+    lzo_compress_t fptr = (lzo_compress_t) FUNC_PTR(lzo_compressor_funcptr);
+    rv = fptr(uncompressed_bytes, uncompressed_direct_buf_len,
+              compressed_bytes, &no_compressed_bytes, 
+              workmem);
+  } else {
+    lzo_compress2_t fptr = (lzo_compress2_t) FUNC_PTR(lzo_compressor_funcptr);
+    rv = fptr(uncompressed_bytes, uncompressed_direct_buf_len,
+              compressed_bytes, &no_compressed_bytes, 
+              workmem, compression_level); 
+  }
+
+  if (rv == LZO_E_OK) {
+    // lzo compresses all input data
+    (*env)->SetIntField(env, this, 
+                LzoCompressor_uncompressedDirectBufLen, 0);
+  } else {
+    // 64 bytes: the old 32-byte buffer truncated long messages such as
+    // "lzo1b_999_compress returned: -128".
+    char exception_msg[64];
+    snprintf(exception_msg, sizeof(exception_msg), "%s returned: %d",
+             lzo_compressor_function, rv);
+    THROW(env, "java/lang/InternalError", exception_msg);
+  }
+
+  return (jint)no_compressed_bytes;
+}
+
+/**
+ * vim: sw=2: ts=2: et:
+ */
+

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoDecompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoDecompressor.c?view=auto&rev=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoDecompressor.c (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/LzoDecompressor.c Wed Jan 10 09:46:54 2007
@@ -0,0 +1,194 @@
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDIO_H
+  #include <stdio.h>
+#else
+  #error 'stdio.h not found'
+#endif  
+
+#if defined HAVE_STDLIB_H
+  #include <stdlib.h>
+#else
+  #error 'stdlib.h not found'
+#endif  
+
+#include "org_apache_hadoop_io_compress_lzo.h"
+
+// The lzo2 library-handle
+static void *liblzo2 = NULL;
+
+// The lzo 'decompressors'
+// Table of the lzo decompressors exposed to Java, indexed by the
+// 'decompressor' ordinal passed into LzoDecompressor_init()/
+// decompressBytesDirect(). The /* N */ index comments are load-bearing for
+// that mapping - keep them (and the Java-side ordinals) in sync.
+static const char* lzo_decompressors[] = {
+  /** lzo1 decompressors */
+  /* 0 */   "lzo1_decompress", 
+  
+  /** lzo1a decompressors */
+  /* 1 */   "lzo1a_decompress",
+
+  /** lzo1b decompressors */
+  /* 2 */   "lzo1b_decompress", 
+  /* 3 */   "lzo1b_decompress_safe",
+
+  /** lzo1c decompressors */
+  /* 4 */   "lzo1c_decompress",
+  /* 5 */   "lzo1c_decompress_safe",
+  /* 6 */   "lzo1c_decompress_asm",
+  /* 7 */   "lzo1c_decompress_asm_safe",
+  
+  /** lzo1f decompressors */
+  /* 8 */   "lzo1f_decompress",
+  /* 9 */   "lzo1f_decompress_safe",
+  /* 10 */  "lzo1f_decompress_asm_fast",
+  /* 11 */  "lzo1f_decompress_asm_fast_safe",
+
+  /** lzo1x decompressors */
+  /* 12 */  "lzo1x_decompress",
+  /* 13 */  "lzo1x_decompress_safe",
+  /* 14 */  "lzo1x_decompress_asm",
+  /* 15 */  "lzo1x_decompress_asm_safe",
+  /* 16 */  "lzo1x_decompress_asm_fast",
+  /* 17 */  "lzo1x_decompress_asm_fast_safe",  // BUG FIX: the comma here was
+                                               // missing, so entries 17 and 18
+                                               // were concatenated into one
+                                               // bogus symbol name and every
+                                               // index >= 18 was off by one.
+  
+  /** lzo1y decompressors */
+  /* 18 */  "lzo1y_decompress",
+  /* 19 */  "lzo1y_decompress_safe",
+  /* 20 */  "lzo1y_decompress_asm",
+  /* 21 */  "lzo1y_decompress_asm_safe",
+  /* 22 */  "lzo1y_decompress_asm_fast",
+  /* 23 */  "lzo1y_decompress_asm_fast_safe",
+
+  /** lzo1z decompressors */
+  /* 24 */  "lzo1z_decompress", 
+  /* 25 */  "lzo1z_decompress_safe",
+
+  /** lzo2a decompressors */
+  /* 26 */  "lzo2a_decompress",
+  /* 27 */  "lzo2a_decompress_safe",  // trailing comma guards future appends
+};
+
+// Cached jfieldIDs of the Java-side LzoDecompressor fields. Resolved once in
+// initIDs() below and read/written by init() and decompressBytesDirect().
+static jfieldID LzoDecompressor_finished;
+static jfieldID LzoDecompressor_compressedDirectBuf;
+static jfieldID LzoDecompressor_compressedDirectBufLen;
+static jfieldID LzoDecompressor_uncompressedDirectBuf;
+static jfieldID LzoDecompressor_directBufferSize;
+static jfieldID LzoDecompressor_lzoDecompressor;
+
+// One-time native setup: dlopen()s liblzo2 and caches the jfieldIDs above.
+// Throws UnsatisfiedLinkError (and returns) if the library cannot be loaded.
+// NOTE(review): presumably called from the LzoDecompressor class static
+// initializer on the Java side - confirm there.
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoDecompressor_initIDs(
+	JNIEnv *env, jclass class
+	) {
+	// Load liblzo2.so
+	liblzo2 = dlopen(HADOOP_LZO_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+	if (!liblzo2) {
+		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load liblzo2.so!");
+	  return;
+	}
+    
+  LzoDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
+  LzoDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
+                                                "compressedDirectBuf", 
+                                                "Ljava/nio/Buffer;");
+  LzoDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class, 
+                                                    "compressedDirectBufLen", "I");
+  LzoDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
+                                                  "uncompressedDirectBuf", 
+                                                  "Ljava/nio/Buffer;");
+  LzoDecompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+                                              "directBufferSize", "I");
+  LzoDecompressor_lzoDecompressor = (*env)->GetFieldID(env, class,
+                                              "lzoDecompressor", "J");
+}
+
+/**
+ * Per-instance setup for the requested decompressor ordinal: initializes the
+ * lzo library via __lzo_init_v2, resolves the decompressor function from
+ * liblzo2 and stores its address in the 'lzoDecompressor' (J) field.
+ * Throws InternalError (and returns) on any failure.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoDecompressor_init(
+  JNIEnv *env, jobject this, jint decompressor 
+  ) {
+  // Guard the Java-supplied ordinal before indexing the table.
+  if (decompressor < 0 ||
+      decompressor >= (jint)(sizeof(lzo_decompressors) / sizeof(lzo_decompressors[0]))) {
+    THROW(env, "java/lang/InternalError", "Invalid lzo decompressor!");
+    return;
+  }
+  const char *lzo_decompressor_function = lzo_decompressors[decompressor];
+ 
+  // Locate the requisite symbols from liblzo2.so
+  dlerror();                                 // Clear any existing error
+
+  // Initialize the lzo library 
+  void *lzo_init_func_ptr = NULL;
+  typedef int (__LZO_CDECL *lzo_init_t) (unsigned,int,int,int,int,int,int,int,int,int);
+  LOAD_DYNAMIC_SYMBOL(lzo_init_func_ptr, env, liblzo2, "__lzo_init_v2");
+  lzo_init_t lzo_init_function = (lzo_init_t)(lzo_init_func_ptr);
+  int rv = lzo_init_function(LZO_VERSION, (int)sizeof(short), (int)sizeof(int), 
+              (int)sizeof(long), (int)sizeof(lzo_uint32), (int)sizeof(lzo_uint), 
+              (int)lzo_sizeof_dict_t, (int)sizeof(char*), (int)sizeof(lzo_voidp),
+              (int)sizeof(lzo_callback_t));
+  if (rv != LZO_E_OK) {
+    // Was "Ljava/lang/InternalError": that is JNI *descriptor* syntax;
+    // class lookup takes '/'-separated names (as used elsewhere in this file).
+    THROW(env, "java/lang/InternalError", "Could not initialize lzo library!");
+    return;
+  }
+  
+  // Save the decompressor-function into LzoDecompressor_lzoDecompressor
+  void *decompressor_func_ptr = NULL;
+  LOAD_DYNAMIC_SYMBOL(decompressor_func_ptr, env, liblzo2,
+      lzo_decompressor_function);
+  (*env)->SetLongField(env, this, LzoDecompressor_lzoDecompressor,
+                       JLONG(decompressor_func_ptr));
+
+  return;
+}
+
+/**
+ * Decompress the contents of 'compressedDirectBuf' into
+ * 'uncompressedDirectBuf' using the decompressor function resolved by init().
+ * Returns the number of uncompressed bytes, or 0 if any direct-buffer address
+ * is unavailable; throws InternalError if the lzo call returns a failure code.
+ */
+JNIEXPORT jint JNICALL
+Java_org_apache_hadoop_io_compress_lzo_LzoDecompressor_decompressBytesDirect(
+  JNIEnv *env, jobject this, jint decompressor
+  ) {
+  const char *lzo_decompressor_function = lzo_decompressors[decompressor];
+
+  // Get members of LzoDecompressor
+  jobject compressed_direct_buf = (*env)->GetObjectField(env, this,
+                                        LzoDecompressor_compressedDirectBuf);
+  lzo_uint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+                                        LzoDecompressor_compressedDirectBufLen);
+
+  jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
+                                        LzoDecompressor_uncompressedDirectBuf);
+  lzo_uint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
+                                        LzoDecompressor_directBufferSize);
+
+  jlong lzo_decompressor_funcptr = (*env)->GetLongField(env, this,
+                                        LzoDecompressor_lzoDecompressor);
+
+  // Get direct buffers; report 0 bytes produced if any address is unavailable.
+  lzo_bytep uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+                                        uncompressed_direct_buf);
+  if (uncompressed_bytes == 0) {
+    return (jint)0;
+  }
+
+  lzo_bytep compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+                                        compressed_direct_buf);
+  if (compressed_bytes == 0) {
+    return (jint)0;
+  }
+
+  // Decompress. The trailing NULL is the lzo_decompress_t work-memory
+  // argument; per the lzo API, decompression needs no working memory.
+  lzo_uint no_uncompressed_bytes = uncompressed_direct_buf_len;
+  lzo_decompress_t fptr = (lzo_decompress_t) FUNC_PTR(lzo_decompressor_funcptr);
+  int rv = fptr(compressed_bytes, compressed_direct_buf_len,
+                uncompressed_bytes, &no_uncompressed_bytes,
+                NULL); 
+
+  if (rv == LZO_E_OK) {
+    // lzo decompresses all input data
+    (*env)->SetIntField(env, this, LzoDecompressor_compressedDirectBufLen, 0);
+  } else {
+    // 64 bytes: the old 32-byte buffer always truncated messages for long
+    // names such as "lzo1x_decompress_asm_fast_safe returned: -6".
+    char exception_msg[64];
+    snprintf(exception_msg, sizeof(exception_msg), "%s returned: %d", 
+             lzo_decompressor_function, rv);
+    THROW(env, "java/lang/InternalError", exception_msg);
+  }
+  
+  // Explicit narrowing cast (lzo_uint -> jint), matching compressBytesDirect.
+  return (jint)no_uncompressed_bytes;
+}
+
+/**
+ * vim: sw=2: ts=2: et:
+ */
+

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.am
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.am?view=auto&rev=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.am (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.am Wed Jan 10 09:46:54 2007
@@ -0,0 +1,50 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building native 'lzo' for hadoop.
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/$(subdir) .
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * JVM_DATA_MODEL
+#    * OS_ARCH 
+#    * PLATFORM
+#    All these are setup by build.xml and/or the top-level makefile.
+# 3. The creation of requisite jni headers/stubs are also done by build.xml and they are
+#    assumed to be in $(HADOOP_HOME)/build/native/src/org/apache/hadoop/io/compress/lzo.
+#
+
+# The 'vpath directive' to locate the actual source files 
+vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
+
+AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
+AM_LDFLAGS = @JNI_LDFLAGS@
+AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+
+noinst_LTLIBRARIES = libnativelzo.la
+libnativelzo_la_SOURCES = LzoCompressor.c LzoDecompressor.c
+libnativelzo_la_LIBADD = -ldl -ljvm
+
+#
+#vim: sw=4: ts=4: noet
+#

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.in
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.in?view=auto&rev=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.in (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/Makefile.in Wed Jan 10 09:46:54 2007
@@ -0,0 +1,469 @@
+# Makefile.in generated by automake 1.9.6 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building native 'lzo' for hadoop.
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/$(subdir) .
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * JVM_DATA_MODEL
+#    * OS_ARCH 
+#    * PLATFORM
+#    All these are setup by build.xml and/or the top-level makefile.
+# 3. The creation of requisite jni headers/stubs are also done by build.xml and they are
+#    assumed to be in $(HADOOP_HOME)/build/native/src/org/apache/hadoop/io/compress/lzo.
+#
+
+srcdir = @srcdir@
+top_srcdir = @top_srcdir@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+top_builddir = ../../../../../../..
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+INSTALL = @INSTALL@
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = src/org/apache/hadoop/io/compress/lzo
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \
+	$(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+LTLIBRARIES = $(noinst_LTLIBRARIES)
+libnativelzo_la_DEPENDENCIES =
+am_libnativelzo_la_OBJECTS = LzoCompressor.lo LzoDecompressor.lo
+libnativelzo_la_OBJECTS = $(am_libnativelzo_la_OBJECTS)
+DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir)
+depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) \
+	$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+	$(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(AM_LDFLAGS) $(LDFLAGS) -o $@
+SOURCES = $(libnativelzo_la_SOURCES)
+DIST_SOURCES = $(libnativelzo_la_SOURCES)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMDEP_FALSE = @AMDEP_FALSE@
+AMDEP_TRUE = @AMDEP_TRUE@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+ECHO = @ECHO@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+F77 = @F77@
+FFLAGS = @FFLAGS@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JNI_CPPFLAGS = @JNI_CPPFLAGS@
+JNI_LDFLAGS = @JNI_LDFLAGS@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_F77 = @ac_ct_F77@
+ac_ct_RANLIB = @ac_ct_RANLIB@
+ac_ct_STRIP = @ac_ct_STRIP@
+am__fastdepCC_FALSE = @am__fastdepCC_FALSE@
+am__fastdepCC_TRUE = @am__fastdepCC_TRUE@
+am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@
+am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+datadir = @datadir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
+AM_LDFLAGS = @JNI_LDFLAGS@
+AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+noinst_LTLIBRARIES = libnativelzo.la
+libnativelzo_la_SOURCES = LzoCompressor.c LzoDecompressor.c
+libnativelzo_la_LIBADD = -ldl -ljvm
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu  src/org/apache/hadoop/io/compress/lzo/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --gnu  src/org/apache/hadoop/io/compress/lzo/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+clean-noinstLTLIBRARIES:
+	-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
+	@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+libnativelzo.la: $(libnativelzo_la_OBJECTS) $(libnativelzo_la_DEPENDENCIES) 
+	$(LINK)  $(libnativelzo_la_LDFLAGS) $(libnativelzo_la_OBJECTS) $(libnativelzo_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LzoCompressor.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/LzoDecompressor.Plo@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	if $(LTCOMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Plo"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool
+uninstall-info-am:
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \
+	list='$(DISTFILES)'; for file in $$list; do \
+	  case $$file in \
+	    $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+	    $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \
+	  esac; \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \
+	  if test "$$dir" != "$$file" && test "$$dir" != "."; then \
+	    dir="/$$dir"; \
+	    $(mkdir_p) "$(distdir)$$dir"; \
+	  else \
+	    dir=''; \
+	  fi; \
+	  if test -d $$d/$$file; then \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LTLIBRARIES)
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-libtool distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-exec-am:
+
+install-info: install-info-am
+
+install-man:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-info-am
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \
+	clean-libtool clean-noinstLTLIBRARIES ctags distclean \
+	distclean-compile distclean-generic distclean-libtool \
+	distclean-tags distdir dvi dvi-am html html-am info info-am \
+	install install-am install-data install-data-am install-exec \
+	install-exec-am install-info install-info-am install-man \
+	install-strip installcheck installcheck-am installdirs \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+	pdf pdf-am ps ps-am tags uninstall uninstall-am \
+	uninstall-info-am
+
+
+# The 'vpath directive' to locate the actual source files 
+vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
+
+#
+#vim: sw=4: ts=4: noet
+#
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/org_apache_hadoop_io_compress_lzo.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/org_apache_hadoop_io_compress_lzo.h?view=auto&rev=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/org_apache_hadoop_io_compress_lzo.h (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/lzo/org_apache_hadoop_io_compress_lzo.h Wed Jan 10 09:46:54 2007
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_LZO_LZO_H
+#define ORG_APACHE_HADOOP_IO_COMPRESS_LZO_LZO_H
+
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDDEF_H
+  #include <stddef.h>
+#else
+  #error 'stddef.h not found'
+#endif
+    
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error "dlfcn.h not found"
+#endif  
+
+#if defined HAVE_JNI_H    
+  #include <jni.h>
+#else
+  #error 'jni.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1_H
+  #include <lzo/lzo1.h>
+#else
+  #error 'lzo/lzo1.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1A_H
+  #include <lzo/lzo1a.h>
+#else
+  #error 'lzo/lzo1a.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1B_H
+  #include <lzo/lzo1b.h>
+#else
+  #error 'lzo/lzo1b.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1C_H
+  #include <lzo/lzo1c.h>
+#else
+  #error 'lzo/lzo1c.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1F_H
+  #include <lzo/lzo1f.h>
+#else
+  #error 'lzo/lzo1f.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1X_H
+  #include <lzo/lzo1x.h>
+#else
+  #error 'lzo/lzo1x.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1Y_H
+  #include <lzo/lzo1y.h>
+#else
+  #error 'lzo/lzo1y.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO1Z_H
+  #include <lzo/lzo1z.h>
+#else
+  #error 'lzo/lzo1z.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO2A_H
+  #include <lzo/lzo2a.h>
+#else
+  #error 'lzo/lzo2a.h not found'
+#endif
+
+#if defined HAVE_LZO_LZO_ASM_H
+  #include <lzo/lzo_asm.h>
+#else
+  #error 'lzo/lzo_asm.h not found'
+#endif
+
+#include "org_apache_hadoop.h"
+
+/* A helper macro to convert the java 'function-pointer' to a void*. */
+#define FUNC_PTR(func_ptr) ((void*)((ptrdiff_t)(func_ptr)))
+
+/* A helper macro to convert the void* to the java 'function-pointer'. */
+#define JLONG(func_ptr) ((jlong)((ptrdiff_t)(func_ptr)))
+
+#endif //ORG_APACHE_HADOOP_IO_COMPRESS_LZO_LZO_H

Modified: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Wed Jan 10 09:46:54 2007
@@ -75,11 +75,11 @@
 
 	// Locate the requisite symbols from libz.so
 	dlerror();                                 // Clear any existing error
-	LOAD_ZLIB_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
-	LOAD_ZLIB_SYMBOL(dlsym_deflate, env, libz, "deflate");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
 
 	// Initialize the requisite fieldIds
     ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");

Modified: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (original)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Wed Jan 10 09:46:54 2007
@@ -75,11 +75,11 @@
 
 	// Locate the requisite symbols from libz.so
 	dlerror();                                 // Clear any existing error
-	LOAD_ZLIB_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
-	LOAD_ZLIB_SYMBOL(dlsym_inflate, env, libz, "inflate");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
 
 	// Initialize the requisite fieldIds
     ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");

Modified: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (original)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Wed Jan 10 09:46:54 2007
@@ -55,12 +55,6 @@
 
 #include "org_apache_hadoop.h"
 
-/* A helper macro to dlsym the requisite zlib symbol. */
-#define LOAD_ZLIB_SYMBOL(func_ptr, env, handle, symbol) \
-  if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
-  	return; \
-  }
-
 /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
 #define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
 

Modified: lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h (original)
+++ lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h Wed Jan 10 09:46:54 2007
@@ -73,6 +73,13 @@
   return func_ptr;
 }
 
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_ptr, env, handle, symbol) \
+  if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
+    return; \
+  }
+
+
 #endif
 
 //vim: sw=2: ts=2: et

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Wed Jan 10 09:46:54 2007
@@ -28,6 +28,7 @@
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.LzoCodec;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.*;
 
@@ -41,8 +42,26 @@
   public TestSequenceFile(String name) { super(name); }
 
   /** Unit tests for SequenceFile. */
-  public void testSequenceFile() throws Exception {
+  public void testZlibSequenceFile() throws Exception {
+    LOG.info("Testing SequenceFile with DefaultCodec");
     compressedSeqFileTest(new DefaultCodec());
+    LOG.info("Successfully tested SequenceFile with DefaultCodec");
+  }
+  
+  public void testLzoSequenceFile() throws Exception {
+    if (LzoCodec.isNativeLzoLoaded()) {
+      LOG.info("Testing SequenceFile with LzoCodec");
+      CompressionCodec lzoCodec = null;
+      try {
+        lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(
+                conf.getClassByName(LzoCodec.class.getName()), conf);
+      } catch (ClassNotFoundException cnfe) {
+        throw new IOException("Cannot find LzoCodec!");
+      }
+
+      compressedSeqFileTest(lzoCodec);
+      LOG.info("Successfully tested SequenceFile with LzoCodec");
+    }
   }
   
   public void compressedSeqFileTest(CompressionCodec codec) throws Exception {
@@ -60,8 +79,6 @@
 
     FileSystem fs = new LocalFileSystem(conf);
     try {
-        //LOG.setLevel(Level.FINE);
-
         // SequenceFile.Writer
         writeTest(fs, count, seed, file, CompressionType.NONE, null);
         readTest(fs, count, seed, file);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java?view=diff&rev=494905&r1=494904&r2=494905
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java Wed Jan 10 09:46:54 2007
@@ -51,6 +51,12 @@
     codecTest(seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
   
+  public void testLzoCodec() throws IOException {
+    if (LzoCodec.isNativeLzoLoaded()) {
+      codecTest(seed, count, "org.apache.hadoop.io.compress.LzoCodec");
+    }
+  }
+  
   private static void codecTest(int seed, int count, String codecClass) 
   throws IOException {