Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/11/14 23:35:25 UTC

svn commit: r475025 [9/9] - in /lucene/hadoop/trunk: ./ bin/ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/io/compress/ src/java/org/apache/hadoop/io/compress/zlib/ src/java/org/apache/hadoop/util/ src/native/ src/native/config/ src/native/...

Added: lucene/hadoop/trunk/src/native/configure.ac
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/configure.ac?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/configure.ac (added)
+++ lucene/hadoop/trunk/src/native/configure.ac Tue Nov 14 14:35:22 2006
@@ -0,0 +1,105 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# configure.ac for hadoop native code. 
+#
+
+# Notes: 
+# 1. This configure.ac depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * JVM_DATA_MODEL
+#    * OS_NAME
+#    * OS_ARCH 
+#    All these are set up by build.xml.
+
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+#
+
+AC_PREREQ(2.59)
+AC_INIT(src/org_apache_hadoop.h)
+AC_CONFIG_SRCDIR([src/org_apache_hadoop.h])
+AC_CONFIG_AUX_DIR(config)
+AC_CONFIG_HEADER([config.h])
+
+AM_INIT_AUTOMAKE(hadoop,1.0.0)
+
+# Checks for programs.
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+# Checks for libraries.
+dnl Check for '-ldl'
+AC_CHECK_LIB([dl], [dlopen])
+
+dnl Check for '-ljvm'
+JNI_LDFLAGS=""
+if test "$JAVA_HOME" != ""
+then
+  JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
+fi
+ldflags_bak=$LDFLAGS
+LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
+AC_CHECK_LIB([jvm], [JNI_GetCreatedJavaVMs])
+LDFLAGS=$ldflags_bak
+AC_SUBST([JNI_LDFLAGS])
+
+dnl Check for '-lz'
+AC_CHECK_LIB([z], [deflate])
+
+# Checks for header files.
+dnl Check for Ansi C headers
+AC_HEADER_STDC
+
+dnl Check for other standard C headers
+AC_CHECK_HEADERS([stdio.h stddef.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
+
+dnl Check for JNI headers
+JNI_CPPFLAGS=""
+if test "$JAVA_HOME" != ""
+then
+  for dir in `find $JAVA_HOME/include -follow -type d`
+  do
+    JNI_CPPFLAGS="$JNI_CPPFLAGS -I$dir"
+  done
+fi
+cppflags_bak=$CPPFLAGS
+CPPFLAGS="$CPPFLAGS $JNI_CPPFLAGS"
+AC_CHECK_HEADERS([jni.h], [], AC_MSG_ERROR([Native java headers not found. Is \$JAVA_HOME set correctly?]))
+CPPFLAGS=$cppflags_bak
+AC_SUBST([JNI_CPPFLAGS])
+
+dnl Check for zlib headers
+AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_C_CONST
+
+# Checks for library functions.
+AC_CHECK_FUNCS([memset])
+
+AC_CONFIG_FILES([Makefile
+                 src/org/apache/hadoop/io/compress/zlib/Makefile
+                 lib/Makefile])
+AC_OUTPUT
+
+#
+#vim: sw=2: ts=2: noet
+#
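
The zlib header check above feeds the detected shared-object name into the HADOOP_ZLIB_LIBRARY define (via the AC_COMPUTE_NEEDED_DSO macro from acinclude.m4). The ZlibCompressor.c and ZlibDecompressor.c sources later in this revision open that library at run time with dlopen and bind individual zlib symbols with dlsym. A minimal, self-contained sketch of that idiom follows; it is illustrative only, and the "libz.so.1" fallback is an assumption (configure normally supplies the real name):

    /* zprobe.c - sketch of the dlopen/dlsym pattern used by the JNI sources.
     * Build on Linux with something like: cc -o zprobe zprobe.c -ldl
     */
    #include <dlfcn.h>
    #include <stdio.h>

    #ifndef HADOOP_ZLIB_LIBRARY
    #define HADOOP_ZLIB_LIBRARY "libz.so.1"   /* assumption; set by configure in the real build */
    #endif

    typedef const char *(*zlibVersion_t)(void);

    int main(void) {
      void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
      if (!libz) {
        fprintf(stderr, "cannot load %s: %s\n", HADOOP_ZLIB_LIBRARY, dlerror());
        return 1;
      }
      dlerror();                               /* clear any stale error before dlsym */
      zlibVersion_t zlibVersion = (zlibVersion_t)dlsym(libz, "zlibVersion");
      if (!zlibVersion) {
        fprintf(stderr, "cannot resolve zlibVersion: %s\n", dlerror());
        return 1;
      }
      printf("zlib runtime version: %s\n", zlibVersion());
      dlclose(libz);
      return 0;
    }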

Added: lucene/hadoop/trunk/src/native/lib/Makefile.am
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/lib/Makefile.am?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/lib/Makefile.am (added)
+++ lucene/hadoop/trunk/src/native/lib/Makefile.am Tue Nov 14 14:35:22 2006
@@ -0,0 +1,44 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building libhadoop.so 
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/lib 
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * OS_ARCH 
+#    All these are set up by build.xml and/or the top-level makefile.
+#
+
+# Add .lo files in $(SUBDIRS) to construct libhadoop.so
+HADOOP_OBJS = $(foreach path,$(addprefix ../,$(SUBDIRS)),$(wildcard $(path)/*.lo))
+AM_LDFLAGS = @JNI_LDFLAGS@
+
+lib_LTLIBRARIES = libhadoop.la
+libhadoop_la_SOURCES = 
+libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
+
+#
+#vim: sw=4: ts=4: noet
+#

Added: lucene/hadoop/trunk/src/native/lib/Makefile.in
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/lib/Makefile.in?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/lib/Makefile.in (added)
+++ lucene/hadoop/trunk/src/native/lib/Makefile.in Tue Nov 14 14:35:22 2006
@@ -0,0 +1,422 @@
+# Makefile.in generated by automake 1.9.6 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building libhadoop.so 
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/lib 
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * OS_ARCH 
+#    All these are set up by build.xml and/or the top-level makefile.
+#
+
+srcdir = @srcdir@
+top_srcdir = @top_srcdir@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+top_builddir = ..
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+INSTALL = @INSTALL@
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = lib
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \
+	$(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = `echo $$p | sed -e 's|^.*/||'`;
+am__installdirs = "$(DESTDIR)$(libdir)"
+libLTLIBRARIES_INSTALL = $(INSTALL)
+LTLIBRARIES = $(lib_LTLIBRARIES)
+am__DEPENDENCIES_1 = $(foreach path,$(addprefix \
+	../,$(SUBDIRS)),$(wildcard $(path)/*.lo))
+libhadoop_la_DEPENDENCIES = $(am__DEPENDENCIES_1)
+am_libhadoop_la_OBJECTS =
+libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
+DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir)
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) \
+	$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+	$(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(AM_LDFLAGS) $(LDFLAGS) -o $@
+SOURCES = $(libhadoop_la_SOURCES)
+DIST_SOURCES = $(libhadoop_la_SOURCES)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMDEP_FALSE = @AMDEP_FALSE@
+AMDEP_TRUE = @AMDEP_TRUE@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+ECHO = @ECHO@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+F77 = @F77@
+FFLAGS = @FFLAGS@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JNI_CPPFLAGS = @JNI_CPPFLAGS@
+JNI_LDFLAGS = @JNI_LDFLAGS@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_F77 = @ac_ct_F77@
+ac_ct_RANLIB = @ac_ct_RANLIB@
+ac_ct_STRIP = @ac_ct_STRIP@
+am__fastdepCC_FALSE = @am__fastdepCC_FALSE@
+am__fastdepCC_TRUE = @am__fastdepCC_TRUE@
+am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@
+am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+datadir = @datadir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+
+# Add .lo files in $(SUBDIRS) to construct libhadoop.so
+HADOOP_OBJS = $(foreach path,$(addprefix ../,$(SUBDIRS)),$(wildcard $(path)/*.lo))
+AM_LDFLAGS = @JNI_LDFLAGS@
+lib_LTLIBRARIES = libhadoop.la
+libhadoop_la_SOURCES = 
+libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
+all: all-am
+
+.SUFFIXES:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu  lib/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --gnu  lib/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+install-libLTLIBRARIES: $(lib_LTLIBRARIES)
+	@$(NORMAL_INSTALL)
+	test -z "$(libdir)" || $(mkdir_p) "$(DESTDIR)$(libdir)"
+	@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
+	  if test -f $$p; then \
+	    f=$(am__strip_dir) \
+	    echo " $(LIBTOOL) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \
+	    $(LIBTOOL) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) "$$p" "$(DESTDIR)$(libdir)/$$f"; \
+	  else :; fi; \
+	done
+
+uninstall-libLTLIBRARIES:
+	@$(NORMAL_UNINSTALL)
+	@set -x; list='$(lib_LTLIBRARIES)'; for p in $$list; do \
+	  p=$(am__strip_dir) \
+	  echo " $(LIBTOOL) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$p'"; \
+	  $(LIBTOOL) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$p"; \
+	done
+
+clean-libLTLIBRARIES:
+	-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
+	@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+libhadoop.la: $(libhadoop_la_OBJECTS) $(libhadoop_la_DEPENDENCIES) 
+	$(LINK) -rpath $(libdir) $(libhadoop_la_LDFLAGS) $(libhadoop_la_OBJECTS) $(libhadoop_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool
+uninstall-info-am:
+tags: TAGS
+TAGS:
+
+ctags: CTAGS
+CTAGS:
+
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \
+	list='$(DISTFILES)'; for file in $$list; do \
+	  case $$file in \
+	    $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+	    $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \
+	  esac; \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \
+	  if test "$$dir" != "$$file" && test "$$dir" != "."; then \
+	    dir="/$$dir"; \
+	    $(mkdir_p) "$(distdir)$$dir"; \
+	  else \
+	    dir=''; \
+	  fi; \
+	  if test -d $$d/$$file; then \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LTLIBRARIES)
+installdirs:
+	for dir in "$(DESTDIR)$(libdir)"; do \
+	  test -z "$$dir" || $(mkdir_p) "$$dir"; \
+	done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-libtool
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-exec-am: install-libLTLIBRARIES
+
+install-info: install-info-am
+
+install-man:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-info-am uninstall-libLTLIBRARIES
+
+.PHONY: all all-am check check-am clean clean-generic \
+	clean-libLTLIBRARIES clean-libtool distclean distclean-compile \
+	distclean-generic distclean-libtool distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-exec install-exec-am install-info \
+	install-info-am install-libLTLIBRARIES install-man \
+	install-strip installcheck installcheck-am installdirs \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+	pdf pdf-am ps ps-am uninstall uninstall-am uninstall-info-am \
+	uninstall-libLTLIBRARIES
+
+
+#
+#vim: sw=4: ts=4: noet
+#
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:

Added: lucene/hadoop/trunk/src/native/packageNativeHadoop.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/packageNativeHadoop.sh?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/packageNativeHadoop.sh (added)
+++ lucene/hadoop/trunk/src/native/packageNativeHadoop.sh Tue Nov 14 14:35:22 2006
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+#
+# packageNativeHadoop.sh - A simple script to help package native-hadoop libraries
+#
+
+#
+# Note: 
+# This script relies on the following environment variables to function correctly:
+#  * BASE_NATIVE_LIB_DIR
+#  * BUILD_NATIVE_DIR
+#  * DIST_LIB_DIR
+# All these are set up by build.xml.
+#
+
+TAR='tar -c'
+UNTAR='tar -x'
+
+# Copy the pre-built libraries in $BASE_NATIVE_LIB_DIR 
+for platform in `ls $BASE_NATIVE_LIB_DIR`
+do
+  if test ! -e $DIST_LIB_DIR/$platform
+  then
+    mkdir -p $DIST_LIB_DIR/$platform
+    echo "Created $DIST_LIB_DIR/$platform"
+  fi
+  echo "Copying libraries in $BASE_NATIVE_LIB_DIR/$platform to $DIST_LIB_DIR/$platform/"
+  cd $BASE_NATIVE_LIB_DIR/$platform/
+  $TAR *hadoop* | $UNTAR -C $DIST_LIB_DIR/$platform/
+done  
+
+# Copy the custom-built libraries in $BUILD_NATIVE_DIR
+for platform in `ls $BUILD_NATIVE_DIR`
+do
+  if test ! -e $DIST_LIB_DIR/$platform
+  then
+    mkdir -p $DIST_LIB_DIR/$platform
+    echo "Created $DIST_LIB_DIR/$platform"
+  fi
+  echo "Copying libraries in $BUILD_NATIVE_DIR/$platform/lib to $DIST_LIB_DIR/$platform/"
+  cd $BUILD_NATIVE_DIR/$platform/lib
+  $TAR *hadoop* | $UNTAR -C $DIST_LIB_DIR/$platform/
+done  
+

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am Tue Nov 14 14:35:22 2006
@@ -0,0 +1,50 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building native 'zlib' for hadoop.
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/$(subdir) .
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * JVM_DATA_MODEL
+#    * OS_ARCH 
+#    * PLATFORM
+#    All these are set up by build.xml and/or the top-level makefile.
+# 3. The creation of the requisite jni headers/stubs is also done by build.xml; they are
+#    assumed to be in $(HADOOP_HOME)/build/native/src/org/apache/hadoop/io/compress/zlib.
+#
+
+# The 'vpath directive' to locate the actual source files 
+vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
+
+AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
+AM_LDFLAGS = @JNI_LDFLAGS@
+AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+
+noinst_LTLIBRARIES = libnativezlib.la
+libnativezlib_la_SOURCES = ZlibCompressor.c ZlibDecompressor.c
+libnativezlib_la_LIBADD = -ldl -ljvm
+
+#
+#vim: sw=4: ts=4: noet
+#

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.in
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.in?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.in (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.in Tue Nov 14 14:35:22 2006
@@ -0,0 +1,469 @@
+# Makefile.in generated by automake 1.9.6 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Makefile template for building native 'zlib' for hadoop.
+#
+
+#
+# Notes: 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/$(subdir) .
+# 2. This makefile depends on the following environment variables to function correctly:
+#    * HADOOP_NATIVE_SRCDIR 
+#    * JAVA_HOME
+#    * JVM_DATA_MODEL
+#    * OS_ARCH 
+#    * PLATFORM
+#    All these are set up by build.xml and/or the top-level makefile.
+# 3. The creation of the requisite jni headers/stubs is also done by build.xml; they are
+#    assumed to be in $(HADOOP_HOME)/build/native/src/org/apache/hadoop/io/compress/zlib.
+#
+
+srcdir = @srcdir@
+top_srcdir = @top_srcdir@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+top_builddir = ../../../../../../..
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+INSTALL = @INSTALL@
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = src/org/apache/hadoop/io/compress/zlib
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \
+	$(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+LTLIBRARIES = $(noinst_LTLIBRARIES)
+libnativezlib_la_DEPENDENCIES =
+am_libnativezlib_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo
+libnativezlib_la_OBJECTS = $(am_libnativezlib_la_OBJECTS)
+DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir)
+depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) \
+	$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+	$(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(AM_LDFLAGS) $(LDFLAGS) -o $@
+SOURCES = $(libnativezlib_la_SOURCES)
+DIST_SOURCES = $(libnativezlib_la_SOURCES)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMDEP_FALSE = @AMDEP_FALSE@
+AMDEP_TRUE = @AMDEP_TRUE@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+ECHO = @ECHO@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+F77 = @F77@
+FFLAGS = @FFLAGS@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JNI_CPPFLAGS = @JNI_CPPFLAGS@
+JNI_LDFLAGS = @JNI_LDFLAGS@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_F77 = @ac_ct_F77@
+ac_ct_RANLIB = @ac_ct_RANLIB@
+ac_ct_STRIP = @ac_ct_STRIP@
+am__fastdepCC_FALSE = @am__fastdepCC_FALSE@
+am__fastdepCC_TRUE = @am__fastdepCC_TRUE@
+am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@
+am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+datadir = @datadir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
+AM_LDFLAGS = @JNI_LDFLAGS@
+AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+noinst_LTLIBRARIES = libnativezlib.la
+libnativezlib_la_SOURCES = ZlibCompressor.c ZlibDecompressor.c
+libnativezlib_la_LIBADD = -ldl -ljvm
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu  src/org/apache/hadoop/io/compress/zlib/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --gnu  src/org/apache/hadoop/io/compress/zlib/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+clean-noinstLTLIBRARIES:
+	-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
+	@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+libnativezlib.la: $(libnativezlib_la_OBJECTS) $(libnativezlib_la_DEPENDENCIES) 
+	$(LINK)  $(libnativezlib_la_LDFLAGS) $(libnativezlib_la_OBJECTS) $(libnativezlib_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	if $(LTCOMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Plo"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool
+uninstall-info-am:
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \
+	list='$(DISTFILES)'; for file in $$list; do \
+	  case $$file in \
+	    $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+	    $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \
+	  esac; \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \
+	  if test "$$dir" != "$$file" && test "$$dir" != "."; then \
+	    dir="/$$dir"; \
+	    $(mkdir_p) "$(distdir)$$dir"; \
+	  else \
+	    dir=''; \
+	  fi; \
+	  if test -d $$d/$$file; then \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LTLIBRARIES)
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-libtool distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-exec-am:
+
+install-info: install-info-am
+
+install-man:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-info-am
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \
+	clean-libtool clean-noinstLTLIBRARIES ctags distclean \
+	distclean-compile distclean-generic distclean-libtool \
+	distclean-tags distdir dvi dvi-am html html-am info info-am \
+	install install-am install-data install-data-am install-exec \
+	install-exec-am install-info install-info-am install-man \
+	install-strip installcheck installcheck-am installdirs \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+	pdf pdf-am ps ps-am tags uninstall uninstall-am \
+	uninstall-info-am
+
+
+# The 'vpath directive' to locate the actual source files 
+vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
+
+#
+#vim: sw=4: ts=4: noet
+#
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Tue Nov 14 14:35:22 2006
@@ -0,0 +1,290 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDIO_H
+  #include <stdio.h>
+#else
+  #error 'stdio.h not found'
+#endif  
+
+#if defined HAVE_STDLIB_H
+  #include <stdlib.h>
+#else
+  #error 'stdlib.h not found'
+#endif  
+
+#if defined HAVE_STRING_H
+  #include <string.h>
+#else
+  #error 'string.h not found'
+#endif  
+
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error 'dlfcn.h not found'
+#endif  
+
+#include "org_apache_hadoop_io_compress_zlib.h"
+#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
+
+static jfieldID ZlibCompressor_stream;
+static jfieldID ZlibCompressor_uncompressedDirectBuf;
+static jfieldID ZlibCompressor_uncompressedDirectBufOff;
+static jfieldID ZlibCompressor_uncompressedDirectBufLen;
+static jfieldID ZlibCompressor_compressedDirectBuf;
+static jfieldID ZlibCompressor_directBufferSize;
+static jfieldID ZlibCompressor_finish;
+static jfieldID ZlibCompressor_finished;
+
+static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
+static int (*dlsym_deflate)(z_streamp, int);
+static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
+static int (*dlsym_deflateReset)(z_streamp);
+static int (*dlsym_deflateEnd)(z_streamp);
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
+	JNIEnv *env, jclass class
+	) {
+	// Load libz.so
+	void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+	if (!libz) {
+		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
+	  	return;
+	}
+
+	// Locate the requisite symbols from libz.so
+	dlerror();                                 // Clear any existing error
+	LOAD_ZLIB_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+	LOAD_ZLIB_SYMBOL(dlsym_deflate, env, libz, "deflate");
+	LOAD_ZLIB_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+	LOAD_ZLIB_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+	LOAD_ZLIB_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+
+	// Initialize the requisite fieldIds
+    ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
+    ZlibCompressor_finish = (*env)->GetFieldID(env, class, "finish", "Z");
+    ZlibCompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
+    ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
+    									"uncompressedDirectBuf", 
+    									"Ljava/nio/Buffer;");
+    ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class, 
+    										"uncompressedDirectBufOff", "I");
+    ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class, 
+    										"uncompressedDirectBufLen", "I");
+    ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
+    									"compressedDirectBuf", 
+    									"Ljava/nio/Buffer;");
+    ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+    										"directBufferSize", "I");
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
+	JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
+	) {
+	// Create a z_stream
+    z_stream *stream = malloc(sizeof(z_stream));
+    if (!stream) {
+		THROW(env, "java/lang/OutOfMemoryError", NULL);
+		return (jlong)0;
+    }
+    memset((void*)stream, 0, sizeof(z_stream));
+
+	// Initialize stream
+	static const int memLevel = 8; 							// See zconf.h
+    int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
+    			memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
+    			
+    if (rv != Z_OK) {
+	    // Contingency - Report error by throwing appropriate exceptions
+	    free(stream);
+	    stream = NULL;
+	
+		switch (rv) {
+			case Z_MEM_ERROR: 
+			    {
+		    		THROW(env, "java/lang/OutOfMemoryError", NULL);
+			    }
+			break;
+			case Z_STREAM_ERROR:
+		    	{
+		    		THROW(env, "java/lang/IllegalArgumentException", NULL);
+		    	}
+		    break;
+			default:
+		    	{
+		    		THROW(env, "java/lang/InternalError", NULL);
+		    	}
+		    break;
+	    }
+	}
+	
+    return JLONG(stream);
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_setDictionary(
+	JNIEnv *env, jclass class, jlong stream, 
+	jarray b, jint off, jint len
+	) {
+    Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
+    if (!buf) {
+        return;
+    }
+    int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
+    
+    if (rv != Z_OK) {
+    	// Contingency - Report error by throwing appropriate exceptions
+	    switch (rv) {
+		    case Z_STREAM_ERROR:
+			{	
+		    	THROW(env, "java/lang/IllegalArgumentException", NULL);
+			}
+			break;
+	    	default:
+			{
+				THROW(env, "java/lang/InternalError", (ZSTREAM(stream))->msg);
+			}
+			break;
+	    }
+    }
+}
+
+JNIEXPORT jint JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
+	JNIEnv *env, jobject this
+	) {
+	// Get members of ZlibCompressor
+    z_stream *stream = ZSTREAM(
+    						(*env)->GetLongField(env, this, 
+    									ZlibCompressor_stream)
+    					);
+    if (!stream) {
+		THROW(env, "java/lang/NullPointerException", NULL);
+		return (jint)0;
+    } 
+
+	jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
+									ZlibCompressor_uncompressedDirectBuf);
+	jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this, 
+									ZlibCompressor_uncompressedDirectBufOff);
+	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+									ZlibCompressor_uncompressedDirectBufLen);
+
+	jobject compressed_direct_buf = (*env)->GetObjectField(env, this, 
+									ZlibCompressor_compressedDirectBuf);
+	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+									ZlibCompressor_directBufferSize);
+
+	jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
+
+	Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+											uncompressed_direct_buf);
+  	if (uncompressed_bytes == 0) {
+    	return (jint)0;
+	}
+	
+	Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+										compressed_direct_buf);
+  	if (compressed_bytes == 0) {
+		return (jint)0;
+	}
+	
+	// Re-calibrate the z_stream
+  	stream->next_in = uncompressed_bytes + uncompressed_direct_buf_off;
+  	stream->next_out = compressed_bytes;
+  	stream->avail_in = uncompressed_direct_buf_len;
+	stream->avail_out = compressed_direct_buf_len;
+	
+	// Compress
+	int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
+
+	jint no_compressed_bytes = 0;
+	switch (rv) {
+    	// Contingency? - Report error by throwing appropriate exceptions
+  		case Z_STREAM_END:
+  		{
+  			(*env)->SetBooleanField(env, this, ZlibCompressor_finished, JNI_TRUE);
+  		} // cascade
+	  	case Z_OK: 
+	  	{
+	  		uncompressed_direct_buf_off += uncompressed_direct_buf_len - stream->avail_in;
+			(*env)->SetIntField(env, this, 
+						ZlibCompressor_uncompressedDirectBufOff, uncompressed_direct_buf_off);
+			(*env)->SetIntField(env, this, 
+						ZlibCompressor_uncompressedDirectBufLen, stream->avail_in);
+			no_compressed_bytes = compressed_direct_buf_len - stream->avail_out;
+	  	}
+	  	break;
+  		case Z_BUF_ERROR:
+  		break;
+  		default:
+		{
+			THROW(env, "java/lang/InternalError", stream->msg);
+		}
+		break;
+  	}
+  	
+  	return no_compressed_bytes;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_getBytesRead(
+	JNIEnv *env, jclass class, jlong stream
+	) {
+    return (ZSTREAM(stream))->total_in;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_getBytesWritten(
+	JNIEnv *env, jclass class, jlong stream
+	) {
+    return (ZSTREAM(stream))->total_out;
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_reset(
+	JNIEnv *env, jclass class, jlong stream
+	) {
+    if (dlsym_deflateReset(ZSTREAM(stream)) != Z_OK) {
+		THROW(env, "java/lang/InternalError", NULL);
+    }
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_end(
+	JNIEnv *env, jclass class, jlong stream
+	) {
+    if (dlsym_deflateEnd(ZSTREAM(stream)) == Z_STREAM_ERROR) {
+		THROW(env, "java/lang/InternalError", NULL);
+    } else {
+		free(ZSTREAM(stream));
+    }
+}
+
+/**
+ * vim: sw=2: ts=2: et:
+ */
+
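
ZlibCompressor.c (and ZlibDecompressor.c below) use the THROW(), LOAD_ZLIB_SYMBOL(), ZSTREAM() and JLONG() helpers from org_apache_hadoop.h and org_apache_hadoop_io_compress_zlib.h, which are added elsewhere in this revision and not reproduced in this message. For orientation, THROW() amounts to the standard JNI idiom of raising a Java exception by class name from native code; the sketch below illustrates that idiom and is not the committed macro:

    /* Sketch only - the committed THROW() macro in org_apache_hadoop.h may differ in detail. */
    #include <jni.h>

    static void throw_by_name(JNIEnv *env, const char *class_name, const char *msg) {
      jclass cls = (*env)->FindClass(env, class_name);
      if (cls != NULL) {     /* if FindClass fails it has already queued a NoClassDefFoundError */
        (*env)->ThrowNew(env, cls, msg);
        (*env)->DeleteLocalRef(env, cls);
      }
    }

    /* Usage mirrors the calls above, e.g. throw_by_name(env, "java/lang/OutOfMemoryError", NULL);
     * a NULL message simply leaves the Throwable without a detail message. */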

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Tue Nov 14 14:35:22 2006
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDIO_H
+  #include <stdio.h>
+#else
+  #error 'stdio.h not found'
+#endif  
+
+#if defined HAVE_STDLIB_H
+  #include <stdlib.h>
+#else
+  #error 'stdlib.h not found'
+#endif  
+
+#if defined HAVE_STRING_H
+  #include <string.h>
+#else
+  #error 'string.h not found'
+#endif  
+
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error 'dlfcn.h not found'
+#endif  
+
+#include "org_apache_hadoop_io_compress_zlib.h"
+#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
+
+static jfieldID ZlibDecompressor_stream;
+static jfieldID ZlibDecompressor_compressedDirectBuf;
+static jfieldID ZlibDecompressor_compressedDirectBufOff;
+static jfieldID ZlibDecompressor_compressedDirectBufLen;
+static jfieldID ZlibDecompressor_uncompressedDirectBuf;
+static jfieldID ZlibDecompressor_directBufferSize;
+static jfieldID ZlibDecompressor_needDict;
+static jfieldID ZlibDecompressor_finished;
+
+static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
+static int (*dlsym_inflate)(z_streamp, int);
+static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
+static int (*dlsym_inflateReset)(z_streamp);
+static int (*dlsym_inflateEnd)(z_streamp);
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
+	JNIEnv *env, jclass class
+	) {
+	// Load libz.so
+    void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+	if (!libz) {
+	  THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
+	  return;
+	} 
+
+	// Locate the requisite symbols from libz.so
+	dlerror();                                 // Clear any existing error
+	LOAD_ZLIB_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
+	LOAD_ZLIB_SYMBOL(dlsym_inflate, env, libz, "inflate");
+	LOAD_ZLIB_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+	LOAD_ZLIB_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
+	LOAD_ZLIB_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+
+	// Initialize the requisite fieldIds
+    ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
+    ZlibDecompressor_needDict = (*env)->GetFieldID(env, class, "needDict", "Z");
+    ZlibDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
+    ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
+    											"compressedDirectBuf", 
+    											"Ljava/nio/Buffer;");
+    ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class, 
+    										"compressedDirectBufOff", "I");
+    ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class, 
+    										"compressedDirectBufLen", "I");
+    ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
+    											"uncompressedDirectBuf", 
+    											"Ljava/nio/Buffer;");
+    ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+    											"directBufferSize", "I");
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
+	JNIEnv *env, jclass cls, jint windowBits
+	) {
+    z_stream *stream = malloc(sizeof(z_stream));
+    if (stream == 0) {
+		THROW(env, "java/lang/OutOfMemoryError", NULL);
+		return (jlong)0;
+    }
+
+    memset((void*)stream, 0, sizeof(z_stream));
+    
+    int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
+
+	if (rv != Z_OK) {
+	    // Contingency - Report error by throwing appropriate exceptions
+		free(stream);
+		stream = NULL;
+		
+		switch (rv) {
+		 	case Z_MEM_ERROR:
+		 	{
+		    	THROW(env, "java/lang/OutOfMemoryError", NULL);
+		 	}
+		 	break;
+	  		default:
+	  		{
+			    THROW(env, "java/lang/InternalError", NULL);
+	  		}
+	  		break;
+		}
+	}
+	
+	return JLONG(stream);
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_setDictionary(
+	JNIEnv *env, jclass cls, jlong stream,
+	jarray b, jint off, jint len
+	) {
+    Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
+    if (!buf) {
+		THROW(env, "java/lang/InternalError", NULL);
+        return;
+    }
+    int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
+    
+    if (rv != Z_OK) {
+	    // Contingency - Report error by throwing appropriate exceptions
+		switch (rv) {
+		    case Z_STREAM_ERROR:
+	    	case Z_DATA_ERROR:
+			{
+				THROW(env, "java/lang/IllegalArgumentException", 
+					(ZSTREAM(stream))->msg);
+			}
+			break;
+	    	default:
+	    	{
+				THROW(env, "java/lang/InternalError", (ZSTREAM(stream))->msg);
+	    	}
+			break;
+		}
+	}
+}
+
+JNIEXPORT jint JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
+	JNIEnv *env, jobject this
+	) {
+	// Get members of ZlibDecompressor
+    z_stream *stream = ZSTREAM(
+    						(*env)->GetLongField(env, this, 
+    									ZlibDecompressor_stream)
+    					);
+    if (!stream) {
+		THROW(env, "java/lang/NullPointerException", NULL);
+		return (jint)0;
+    } 
+
+	jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+											ZlibDecompressor_compressedDirectBuf);
+	jint compressed_direct_buf_off = (*env)->GetIntField(env, this, 
+									ZlibDecompressor_compressedDirectBufOff);
+	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+									ZlibDecompressor_compressedDirectBufLen);
+
+	jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+											ZlibDecompressor_uncompressedDirectBuf);
+	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+										ZlibDecompressor_directBufferSize);
+
+	Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+										compressed_direct_buf);
+	if (!compressed_bytes) {
+	    return (jint)0;
+	}
+	
+	Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+											uncompressed_direct_buf);
+	if (!uncompressed_bytes) {
+	    return (jint)0;
+	}
+	
+	// Re-calibrate the z_stream
+	stream->next_in  = compressed_bytes + compressed_direct_buf_off;
+	stream->next_out = uncompressed_bytes;
+	stream->avail_in  = compressed_direct_buf_len;
+	stream->avail_out = uncompressed_direct_buf_len;
+	
+	// Decompress
+	int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
+
+	// Handle the return code: update buffer state on success, throw on error
+	int no_decompressed_bytes = 0;	
+	switch (rv) {
+		case Z_STREAM_END:
+		{
+		    (*env)->SetBooleanField(env, this, ZlibDecompressor_finished, JNI_TRUE);
+		} // fall through to the Z_OK handling
+		case Z_OK:
+		{
+		    compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff, 
+		    			compressed_direct_buf_off);
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen, 
+		    			stream->avail_in);
+		    no_decompressed_bytes = uncompressed_direct_buf_len - stream->avail_out;
+		}
+		break;
+		case Z_NEED_DICT:
+		{
+		    (*env)->SetBooleanField(env, this, ZlibDecompressor_needDict, JNI_TRUE);
+		    compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff, 
+		    			compressed_direct_buf_off);
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen, 
+		    			stream->avail_in);
+		}
+		break;
+		case Z_BUF_ERROR:
+		break;
+		case Z_DATA_ERROR:
+		{
+		    THROW(env, "java/io/IOException", stream->msg);
+		}
+		break;
+		case Z_MEM_ERROR:
+		{
+		    THROW(env, "java/lang/OutOfMemoryError", NULL);
+		}
+		break;
+		default:
+		{
+		    THROW(env, "java/lang/InternalError", stream->msg);
+		}
+		break;
+    }
+    
+    return no_decompressed_bytes;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_getBytesRead(
+	JNIEnv *env, jclass cls, jlong stream
+	) {
+    return (ZSTREAM(stream))->total_in;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_getBytesWritten(
+	JNIEnv *env, jclass cls, jlong stream
+	) {
+    return (ZSTREAM(stream))->total_out;
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_reset(
+	JNIEnv *env, jclass cls, jlong stream
+	) {
+    if (dlsym_inflateReset(ZSTREAM(stream)) != Z_OK) {
+		THROW(env, "java/lang/InternalError", 0);
+    }
+}
+
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_end(
+	JNIEnv *env, jclass cls, jlong stream
+	) {
+    if (dlsym_inflateEnd(ZSTREAM(stream)) == Z_STREAM_ERROR) {
+		THROW(env, "java/lang/InternalError", 0);
+    } else {
+		free(ZSTREAM(stream));
+    }
+}
+
+/**
+ * vim: sw=2: ts=2: et:
+ */
+
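
For reference only (not part of this commit): the JNI methods above drive the standard
zlib inflate lifecycle. The standalone sketch below shows that same
inflateInit2/inflate/inflateEnd sequence and the avail_out accounting used by
inflateBytesDirect, against hypothetical caller-supplied in-memory buffers.

    #include <stdio.h>
    #include <string.h>
    #include <zlib.h>

    /* Minimal sketch: inflate a zlib-wrapped buffer in one shot.
     * 'in'/'in_len' and 'out'/'out_cap' are hypothetical caller-supplied buffers. */
    int inflate_buffer(const unsigned char *in, size_t in_len,
                       unsigned char *out, size_t out_cap, size_t *out_len) {
      z_stream strm;
      memset(&strm, 0, sizeof(strm));       /* Z_NULL zalloc/zfree/opaque */

      /* windowBits = 15 selects the plain zlib format */
      if (inflateInit2(&strm, 15) != Z_OK) {
        return -1;
      }

      strm.next_in   = (Bytef *)in;
      strm.avail_in  = (uInt)in_len;
      strm.next_out  = out;
      strm.avail_out = (uInt)out_cap;

      int rv = inflate(&strm, Z_PARTIAL_FLUSH);
      if (rv != Z_OK && rv != Z_STREAM_END) {
        fprintf(stderr, "inflate failed: %s\n", strm.msg ? strm.msg : "unknown");
        inflateEnd(&strm);
        return -1;
      }

      *out_len = out_cap - strm.avail_out;  /* same accounting as inflateBytesDirect */
      inflateEnd(&strm);
      return 0;
    }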

Added: lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (added)
+++ lucene/hadoop/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Tue Nov 14 14:35:22 2006
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
+#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
+
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_STDDEF_H
+  #include <stddef.h>
+#else
+  #error 'stddef.h not found'
+#endif
+    
+#if defined HAVE_ZLIB_H
+  #include <zlib.h>
+#else
+  #error 'Please install zlib-development packages for your platform.'
+#endif
+    
+#if defined HAVE_ZCONF_H
+  #include <zconf.h>
+#else
+  #error 'Please install zlib-development packages for your platform.'
+#endif
+
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error "dlfcn.h not found"
+#endif  
+
+#if defined HAVE_JNI_H    
+  #include <jni.h>
+#else
+  #error 'jni.h not found'
+#endif
+
+#include "org_apache_hadoop.h"
+
+/* A helper macro to dlsym the requisite zlib symbol. */
+#define LOAD_ZLIB_SYMBOL(func_ptr, env, handle, symbol) \
+  if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
+  	return; \
+  }
+
+/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
+#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
+
+/* A helper macro to convert the z_stream pointer to the java 'stream-handle'. */
+#define JLONG(stream) ((jlong)((ptrdiff_t)(stream)))
+
+#endif //ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
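
For reference only (not part of this commit): the ZSTREAM/JLONG pair above is a plain
pointer <-> jlong round-trip through ptrdiff_t, which lets a native method hand a
z_stream to Java as an opaque handle and recover it later. A minimal sketch of that
pattern follows; the Java_Example_* entry points are hypothetical, not names from
this patch.

    #include <stdlib.h>
    #include "org_apache_hadoop_io_compress_zlib.h"

    /* Hypothetical native method: allocate a z_stream and return it as a handle. */
    JNIEXPORT jlong JNICALL
    Java_Example_open(JNIEnv *env, jclass cls) {
      z_stream *stream = calloc(1, sizeof(z_stream));
      return (stream != NULL) ? JLONG(stream) : (jlong)0;
    }

    /* Hypothetical native method: recover the pointer from the handle and free it. */
    JNIEXPORT void JNICALL
    Java_Example_close(JNIEnv *env, jclass cls, jlong handle) {
      z_stream *stream = ZSTREAM(handle);
      if (stream != NULL) {
        free(stream);
      }
    }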

Added: lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h?view=auto&rev=475025
==============================================================================
--- lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h (added)
+++ lucene/hadoop/trunk/src/native/src/org_apache_hadoop.h Tue Nov 14 14:35:22 2006
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This file includes some common utilities 
+ * for all native code used in hadoop.
+ */
+ 
+#if !defined ORG_APACHE_HADOOP_H
+#define ORG_APACHE_HADOOP_H
+
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error "dlfcn.h not found"
+#endif  
+
+#if defined HAVE_JNI_H    
+  #include <jni.h>
+#else
+  #error 'jni.h not found'
+#endif
+
+/* A helper macro to 'throw' a java exception. */ 
+#define THROW(env, exception_name, message) \
+  { \
+	jclass ecls = (*env)->FindClass(env, exception_name); \
+	if (ecls) { \
+	  (*env)->ThrowNew(env, ecls, message); \
+	  (*env)->DeleteLocalRef(env, ecls); \
+	} \
+  }
+
+/** 
+ * A helper function to dlsym a 'symbol' from a given library-handle. 
+ * 
+ * @param env jni handle to report contingencies.
+ * @param handle handle to the dlopen'ed library.
+ * @param symbol symbol to load.
+ * @return returns the address where the symbol is loaded in memory, 
+ *         <code>NULL</code> on error.
+ */
+static void *do_dlsym(JNIEnv *env, void *handle, const char *symbol) {
+  if (!env || !handle || !symbol) {
+  	THROW(env, "java/lang/InternalError", NULL);
+  	return NULL;
+  }
+  char *error = NULL;
+  dlerror();  // clear any stale error before the lookup, per the dlsym(3) protocol
+  void *func_ptr = dlsym(handle, symbol);
+  if ((error = dlerror()) != NULL) {
+  	THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
+  	return NULL;
+  }
+  return func_ptr;
+}
+
+#endif
+
+//vim: sw=2: ts=2: et
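
For reference only (not part of this commit): THROW and do_dlsym are typically used
together when a native component binds zlib symbols at runtime instead of linking
them directly. A rough sketch follows, with hypothetical names (Java_Example_initIDs,
dlsym_deflate) and a Linux-specific library name that is only an assumption here.

    #include <zlib.h>
    #include "org_apache_hadoop.h"

    /* Hypothetical function pointer for zlib's deflate(), resolved lazily. */
    static int (*dlsym_deflate)(z_streamp, int);

    JNIEXPORT void JNICALL
    Java_Example_initIDs(JNIEnv *env, jclass cls) {
      /* "libz.so" is a Linux-specific assumption; the real name varies by platform. */
      void *libz = dlopen("libz.so", RTLD_LAZY | RTLD_GLOBAL);
      if (libz == NULL) {
        THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
        return;
      }
      dlsym_deflate = (int (*)(z_streamp, int))do_dlsym(env, libz, "deflate");
      if (dlsym_deflate == NULL) {
        return;  /* do_dlsym has already thrown UnsatisfiedLinkError */
      }
    }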

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?view=diff&rev=475025&r1=475024&r2=475025
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Tue Nov 14 14:35:22 2006
@@ -26,6 +26,9 @@
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.*;
 
 
@@ -39,6 +42,10 @@
 
   /** Unit tests for SequenceFile. */
   public void testSequenceFile() throws Exception {
+    compressedSeqFileTest(new DefaultCodec());
+  }
+  
+  public void compressedSeqFileTest(CompressionCodec codec) throws Exception {
     int count = 1024 * 10;
     int megabytes = 1;
     int factor = 5;
@@ -56,7 +63,7 @@
         //LOG.setLevel(Level.FINE);
 
         // SequenceFile.Writer
-        writeTest(fs, count, seed, file, CompressionType.NONE);
+        writeTest(fs, count, seed, file, CompressionType.NONE, null);
         readTest(fs, count, seed, file);
 
         sortTest(fs, count, megabytes, factor, false, file);
@@ -74,7 +81,8 @@
         checkSort(fs, count, seed, file);
         
         // SequenceFile.RecordCompressWriter
-        writeTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD);
+        writeTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD, 
+            codec);
         readTest(fs, count, seed, recordCompressedFile);
 
         sortTest(fs, count, megabytes, factor, false, recordCompressedFile);
@@ -92,7 +100,8 @@
         checkSort(fs, count, seed, recordCompressedFile);
         
         // SequenceFile.BlockCompressWriter
-        writeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK);
+        writeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
+            codec);
         readTest(fs, count, seed, blockCompressedFile);
 
         sortTest(fs, count, megabytes, factor, false, blockCompressedFile);
@@ -115,14 +124,14 @@
   }
 
   private static void writeTest(FileSystem fs, int count, int seed, Path file, 
-      CompressionType compressionType)
+      CompressionType compressionType, CompressionCodec codec)
     throws IOException {
     fs.delete(file);
     LOG.info("creating " + count + " records with " + compressionType +
               " compression");
     SequenceFile.Writer writer = 
       SequenceFile.createWriter(fs, conf, file, 
-          RandomDatum.class, RandomDatum.class, compressionType);
+          RandomDatum.class, RandomDatum.class, compressionType, codec);
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
     for (int i = 0; i < count; i++) {
       generator.next();
@@ -151,7 +160,7 @@
       RandomDatum value = generator.getValue();
 
       try {
-        if ((i%5) == 10) {
+        if ((i%5) == 0) {
           // Testing 'raw' apis
           rawKey.reset();
           reader.nextRaw(rawKey, rawValue);
@@ -163,7 +172,8 @@
           } else {
             reader.next(k, v);
           }
-          // Sanity check
+          
+          // Check
           if (!k.equals(key))
             throw new RuntimeException("wrong key at " + i);
           if (!v.equals(value))
@@ -171,6 +181,10 @@
         }
       } catch (IOException ioe) {
         LOG.info("Problem on row " + i);
+        LOG.info("Expected key = " + key);
+        LOG.info("Expected len = " + key.getLength());
+        LOG.info("Actual key = " + k);
+        LOG.info("Actual len = " + k.getLength());
         LOG.info("Expected value = " + value);
         LOG.info("Expected len = " + value.getLength());
         LOG.info("Actual value = " + v);
@@ -298,12 +312,14 @@
     boolean fast = false;
     boolean merge = false;
     String compressType = "NONE";
+    String compressionCodec = "org.apache.hadoop.io.compress.DefaultCodec";
     Path file = null;
     int seed = new Random().nextInt();
 
     String usage = "Usage: SequenceFile (-local | -dfs <namenode:port>) " +
         "[-count N] " + 
-        "[-seed #] [-check] [-compressType <NONE|RECORD|BLOCK>] " +
+        "[-seed #] [-check] [-compressType <NONE|RECORD|BLOCK>] " + 
+        "-codec <compressionCodec> " + 
         "[[-rwonly] | {[-megabytes M] [-factor F] [-nocreate] [-fast] [-merge]}] " +
         " file";
     if (args.length == 0) {
@@ -336,6 +352,8 @@
               merge = true;
           } else if (args[i].equals("-compressType")) {
               compressType = args[++i];
+          } else if (args[i].equals("-codec")) {
+              compressionCodec = args[++i];
           } else {
               // file is required parameter
               file = new Path(args[i]);
@@ -351,6 +369,7 @@
         LOG.info("fast = " + fast);
         LOG.info("merge = " + merge);
         LOG.info("compressType = " + compressType);
+        LOG.info("compressionCodec = " + compressionCodec);
         LOG.info("file = " + file);
 
         if (rwonly && (!create || merge || fast)) {
@@ -360,9 +379,12 @@
 
         CompressionType compressionType = 
           CompressionType.valueOf(compressType);
+        CompressionCodec codec = (CompressionCodec)ReflectionUtils.newInstance(
+                                    conf.getClassByName(compressionCodec), 
+                                    conf);
 
         if (rwonly || (create && !merge)) {
-            writeTest(fs, count, seed, file, compressionType);
+            writeTest(fs, count, seed, file, compressionType, codec);
             readTest(fs, count, seed, file);
         }
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java?view=diff&rev=475025&r1=475024&r2=475025
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java Tue Nov 14 14:35:22 2006
@@ -39,14 +39,18 @@
 
   private static final Log LOG= 
     LogFactory.getLog("org.apache.hadoop.io.compress.TestCodec");
+
+  private int count = 10000;
+  private int seed = new Random().nextInt();
   
-  public void testCodec() throws IOException {
-    int count = 10000;
-    int seed = new Random().nextInt();
-    
+  public void testDefaultCodec() throws IOException {
     codecTest(seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
   }
   
+  public void testGzipCodec() throws IOException {
+    codecTest(seed, count, "org.apache.hadoop.io.compress.GzipCodec");
+  }
+  
   private static void codecTest(int seed, int count, String codecClass) 
   throws IOException {
     
@@ -59,7 +63,7 @@
     } catch (ClassNotFoundException cnfe) {
       throw new IOException("Illegal codec!");
     }
-    LOG.debug("Created a Codec object of type: " + codecClass);
+    LOG.info("Created a Codec object of type: " + codecClass);
 
     // Generate data
     DataOutputBuffer data = new DataOutputBuffer();
@@ -76,7 +80,7 @@
     DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
     originalData.reset(data.getData(), 0, data.getLength());
     
-    LOG.debug("Generated " + count + " records");
+    LOG.info("Generated " + count + " records");
     
     // Compress data
     DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
@@ -84,24 +88,19 @@
       codec.createOutputStream(compressedDataBuffer);
     DataOutputStream deflateOut = 
       new DataOutputStream(new BufferedOutputStream(deflateFilter));
-    
-    deflateFilter.resetState();
-    compressedDataBuffer.reset();
     deflateOut.write(data.getData(), 0, data.getLength());
     deflateOut.flush();
     deflateFilter.finish();
-    LOG.debug("Finished compressing data");
+    LOG.info("Finished compressing data");
     
     // De-compress data
     DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
+    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, 
+        compressedDataBuffer.getLength());
     CompressionInputStream inflateFilter = 
       codec.createInputStream(deCompressedDataBuffer);
     DataInputStream inflateIn = 
       new DataInputStream(new BufferedInputStream(inflateFilter));
-    
-    inflateFilter.resetState();
-    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, 
-        compressedDataBuffer.getLength());
 
     // Check
     for(int i=0; i < count; ++i) {
@@ -115,7 +114,7 @@
       k2.readFields(inflateIn);
       v2.readFields(inflateIn);
     }
-    LOG.debug("SUCCESS! Completed checking " + count + " records");
+    LOG.info("SUCCESS! Completed checking " + count + " records");
   }
   
   public static void main(String[] args) {

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodecFactory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodecFactory.java?view=diff&rev=475025&r1=475024&r2=475025
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodecFactory.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/compress/TestCodecFactory.java Tue Nov 14 14:35:22 2006
@@ -29,6 +29,16 @@
 public class TestCodecFactory extends TestCase {
 
   private static class BaseCodec implements CompressionCodec {
+    private Configuration conf;
+    
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+    }
+    
+    public Configuration getConf() {
+      return conf;
+    }
+    
     public CompressionOutputStream createOutputStream(OutputStream out) {
       return null;
     }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java?view=diff&rev=475025&r1=475024&r2=475025
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java Tue Nov 14 14:35:22 2006
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.compress.*;
+import org.apache.hadoop.util.ReflectionUtils;
 
 public class TestTextInputFormat extends TestCase {
   private static final Log LOG =
@@ -203,13 +204,14 @@
    * Test using the gzip codec for reading
    */
   public static void testGzip() throws IOException {
+    JobConf job = new JobConf();
     CompressionCodec gzip = new GzipCodec();
+    ReflectionUtils.setConf(gzip, job);
     localFs.delete(workDir);
     writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, 
               "the quick\nbrown\nfox jumped\nover\n the lazy\n dog\n");
     writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
               "this is a test\nof gzip\n");
-    JobConf job = new JobConf();
     job.setInputPath(workDir);
     TextInputFormat format = new TextInputFormat();
     format.configure(job);