Posted to commits@avro.apache.org by cu...@apache.org on 2009/05/14 23:15:18 UTC

svn commit: r774931 - in /hadoop/avro/trunk: ./ src/c/

Author: cutting
Date: Thu May 14 21:15:10 2009
New Revision: 774931

URL: http://svn.apache.org/viewvc?rev=774931&view=rev
Log:
AVRO-33.  C support for primitive types.  Contributed by Matt Massie.

Added:
    hadoop/avro/trunk/src/c/   (with props)
    hadoop/avro/trunk/src/c/.gitignore
    hadoop/avro/trunk/src/c/AUTHORS
    hadoop/avro/trunk/src/c/COPYING
    hadoop/avro/trunk/src/c/ChangeLog
    hadoop/avro/trunk/src/c/INSTALL
    hadoop/avro/trunk/src/c/Makefile.am
    hadoop/avro/trunk/src/c/NEWS
    hadoop/avro/trunk/src/c/README
    hadoop/avro/trunk/src/c/avro.c
    hadoop/avro/trunk/src/c/avro.h
    hadoop/avro/trunk/src/c/avro_double.c
    hadoop/avro/trunk/src/c/avro_file.c
    hadoop/avro/trunk/src/c/avro_memory.c
    hadoop/avro/trunk/src/c/avro_raw.c
    hadoop/avro/trunk/src/c/avro_socket.c
    hadoop/avro/trunk/src/c/avro_string.c
    hadoop/avro/trunk/src/c/avro_zigzag.c
    hadoop/avro/trunk/src/c/configure.in
    hadoop/avro/trunk/src/c/dump.c
    hadoop/avro/trunk/src/c/dump.h
    hadoop/avro/trunk/src/c/error.c
    hadoop/avro/trunk/src/c/error.h
    hadoop/avro/trunk/src/c/test_avro_bytes.c
    hadoop/avro/trunk/src/c/test_avro_float_double.c
    hadoop/avro/trunk/src/c/test_avro_raw.c
    hadoop/avro/trunk/src/c/test_avro_string.c
    hadoop/avro/trunk/src/c/test_avro_zigzag.c
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/README.txt
    hadoop/avro/trunk/build.xml

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=774931&r1=774930&r2=774931&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Thu May 14 21:15:10 2009
@@ -17,6 +17,8 @@
 
     AVRO-8. Add Java support for default values. (cutting)
 
+    AVRO-33.  C support for primitive types.  (Matt Massie via cutting)
+
   IMPROVEMENTS
 
     AVRO-11.  Re-implement specific and reflect datum readers and

Modified: hadoop/avro/trunk/README.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/README.txt?rev=774931&r1=774930&r2=774931&view=diff
==============================================================================
--- hadoop/avro/trunk/README.txt (original)
+++ hadoop/avro/trunk/README.txt Thu May 14 21:15:10 2009
@@ -15,6 +15,7 @@
 
  - Java 1.6
  - Python 2.5 or greater
+ - gcc, automake, libtool, libapr1-dev, libaprutil1-dev
  - Apache Ant 1.7
  - Apache Forrest 0.8 (for documentation, requires Java 1.5)
 

Modified: hadoop/avro/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/build.xml?rev=774931&r1=774930&r2=774931&view=diff
==============================================================================
--- hadoop/avro/trunk/build.xml (original)
+++ hadoop/avro/trunk/build.xml Thu May 14 21:15:10 2009
@@ -32,14 +32,22 @@
 
   <property name="src.dir" value="${basedir}/src"/>  	
   <property name="java.src.dir" value="${src.dir}/java"/>
+  <property name="c.src.dir" value="${src.dir}/c"/>
   <property name="lib.dir" value="${basedir}/lib"/>
   <property name="build.dir" value="${basedir}/build"/>
+  <property name="build.c" value="${build.dir}/c"/>
   <property name="dist.dir" value="${build.dir}/${fullname}"/>
 
   <property name="build.classes" value="${build.dir}/classes"/>
   <property name="build.doc" value="${build.dir}/doc"/>
   <property name="build.javadoc" value="${build.doc}/api"/>
 
+  <exec executable="sed" inputstring="${os.name}" outputproperty="nonspace.os">
+     <arg value="s/ /_/g"/>
+  </exec>
+  <property name="build.platform" 
+            value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
+
   <property name="test.count" value="100"/>
   <property name="test.java.src.dir" value="${basedir}/src/test/java"/>
   <property name="test.schemata.dir" value="${basedir}/src/test/schemata"/>
@@ -87,7 +95,7 @@
     <mkdir dir="${test.java.classes}"/>
   </target>
 
-  <target name="compile" depends="compile-java"/>
+  <target name="compile" depends="compile-java,compile-c"/>
 
   <target name="compile-java" depends="init">
     <copy todir="${build.classes}">
@@ -138,7 +146,7 @@
     </javac> 
   </target>
 
-  <target name="test" depends="test-java,test-py,test-interop"/>
+  <target name="test" depends="test-java,test-py,test-c,test-interop"/>
 
   <target name="compile-test-schemata" depends="compile-java">
     <taskdef name="protocol" classname="org.apache.avro.specific.ProtocolTask">
@@ -390,7 +398,7 @@
     </javadoc>
   </target>	
 
-  <target name="package" depends="jar, javadoc"
+  <target name="package" depends="jar, javadoc, package-c"
 	  description="Build distribution">
     <mkdir dir="${dist.dir}"/>
     <mkdir dir="${dist.dir}/lib"/>
@@ -401,6 +409,10 @@
       <fileset dir="lib"/>
     </copy>
 
+    <copy todir="${dist.dir}/c" includeEmptyDirs="false">
+      <fileset dir="${build.c}"/>
+    </copy>
+
     <copy todir="${dist.dir}"> 
       <fileset file="${build.dir}/${fullname}-*.jar"/>
     </copy>
@@ -444,11 +456,62 @@
     </macro_tar>
   </target>
 
-  <target name="clean" description="Delete build files, and their directories">
+  <target name="clean" description="Delete build files, and their directories"
+	  depends="clean-c">
     <delete dir="${build.dir}"/>
     <delete>
       <fileset dir="src" includes="**/*.pyc" />
     </delete>
   </target>
 
+  <!-- C Targets -->
+  <target name="autoreconf-c-check">
+    <uptodate targetfile="${c.src.dir}/configure"
+	      srcfile="${c.src.dir}/configure.in"
+	      property="autoreconf-c-not-needed"/>
+  </target>
+
+  <target name="autoreconf-c" depends="autoreconf-c-check"
+	  unless="autoreconf-c-not-needed">
+    <exec dir="${c.src.dir}" executable="autoreconf" failonerror="true">
+      <arg line="-f -i"/>
+    </exec>
+  </target>
+
+  <target name="configure-c-check">
+    <uptodate targetfile="${c.src.dir}/Makefile"
+	      srcfile="${c.src.dir}/configure"
+	      property="configure-c-not-needed"/>
+  </target>
+
+  <target name="configure-c" depends="autoreconf-c, configure-c-check"
+	  unless="configure-c-not-needed">
+    <exec dir="${c.src.dir}" executable="sh" failonerror="true">
+      <arg line="./configure --prefix=${build.c}/${build.platform}"/>
+    </exec>
+  </target>
+
+  <target name="compile-c" depends="init, configure-c">
+    <exec dir="${c.src.dir}" executable="make" failonerror="true"/>
+  </target>
+
+  <target name="test-c" depends="compile-c">
+    <exec dir="${c.src.dir}" executable="make" failonerror="true">
+      <arg value="check"/>
+    </exec>
+  </target>
+
+  <target name="package-c" depends="compile-c">
+    <exec dir="${c.src.dir}" executable="make" failonerror="true">
+      <arg value="install"/>
+    </exec>
+  </target>
+
+  <target name="clean-c">
+    <exec dir="${c.src.dir}" executable="make" failonerror="true">
+      <arg value="clean"/>
+    </exec>
+  </target>
+  <!-- End C Targets -->
+
 </project>

Propchange: hadoop/avro/trunk/src/c/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Thu May 14 21:15:10 2009
@@ -0,0 +1,18 @@
+.deps
+.libs
+Makefile
+Makefile.in
+*.la
+*.lo
+*.o
+configure
+stamp-h1
+aclocal.m4
+config
+libtool
+config.status
+config.log
+config.h.in
+config.h
+autom4te.cache
+test_avro_*[!.][!c]

Added: hadoop/avro/trunk/src/c/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/.gitignore?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/.gitignore (added)
+++ hadoop/avro/trunk/src/c/.gitignore Thu May 14 21:15:10 2009
@@ -0,0 +1,18 @@
+.deps
+.libs
+Makefile
+Makefile.in
+*.la
+*.lo
+*.o
+configure
+stamp-h1
+aclocal.m4
+config
+libtool
+config.status
+config.log
+config.h.in
+config.h
+autom4te.cache
+test_avro_*[!.][!c]

Added: hadoop/avro/trunk/src/c/AUTHORS
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/AUTHORS?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/AUTHORS (added)
+++ hadoop/avro/trunk/src/c/AUTHORS Thu May 14 21:15:10 2009
@@ -0,0 +1,4 @@
+
+See http://hadoop.apache.org/avro/ for a list of authors
+
+

Added: hadoop/avro/trunk/src/c/COPYING
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/COPYING?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/COPYING (added)
+++ hadoop/avro/trunk/src/c/COPYING Thu May 14 21:15:10 2009
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

Added: hadoop/avro/trunk/src/c/ChangeLog
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/ChangeLog?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/ChangeLog (added)
+++ hadoop/avro/trunk/src/c/ChangeLog Thu May 14 21:15:10 2009
@@ -0,0 +1,4 @@
+
+
+
+

Added: hadoop/avro/trunk/src/c/INSTALL
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/INSTALL?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/INSTALL (added)
+++ hadoop/avro/trunk/src/c/INSTALL Thu May 14 21:15:10 2009
@@ -0,0 +1,291 @@
+Installation Instructions
+*************************
+
+Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005,
+2006, 2007, 2008 Free Software Foundation, Inc.
+
+   This file is free documentation; the Free Software Foundation gives
+unlimited permission to copy, distribute and modify it.
+
+Basic Installation
+==================
+
+   Briefly, the shell commands `./configure; make; make install' should
+configure, build, and install this package.  The following
+more-detailed instructions are generic; see the `README' file for
+instructions specific to this package.
+
+   The `configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation.  It uses
+those values to create a `Makefile' in each directory of the package.
+It may also create one or more `.h' files containing system-dependent
+definitions.  Finally, it creates a shell script `config.status' that
+you can run in the future to recreate the current configuration, and a
+file `config.log' containing compiler output (useful mainly for
+debugging `configure').
+
+   It can also use an optional file (typically called `config.cache'
+and enabled with `--cache-file=config.cache' or simply `-C') that saves
+the results of its tests to speed up reconfiguring.  Caching is
+disabled by default to prevent problems with accidental use of stale
+cache files.
+
+   If you need to do unusual things to compile the package, please try
+to figure out how `configure' could check whether to do them, and mail
+diffs or instructions to the address given in the `README' so they can
+be considered for the next release.  If you are using the cache, and at
+some point `config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+   The file `configure.ac' (or `configure.in') is used to create
+`configure' by a program called `autoconf'.  You need `configure.ac' if
+you want to change it or regenerate `configure' using a newer version
+of `autoconf'.
+
+The simplest way to compile this package is:
+
+  1. `cd' to the directory containing the package's source code and type
+     `./configure' to configure the package for your system.
+
+     Running `configure' might take a while.  While running, it prints
+     some messages telling which features it is checking for.
+
+  2. Type `make' to compile the package.
+
+  3. Optionally, type `make check' to run any self-tests that come with
+     the package.
+
+  4. Type `make install' to install the programs and any data files and
+     documentation.
+
+  5. You can remove the program binaries and object files from the
+     source code directory by typing `make clean'.  To also remove the
+     files that `configure' created (so you can compile the package for
+     a different kind of computer), type `make distclean'.  There is
+     also a `make maintainer-clean' target, but that is intended mainly
+     for the package's developers.  If you use it, you may have to get
+     all sorts of other programs in order to regenerate files that came
+     with the distribution.
+
+  6. Often, you can also type `make uninstall' to remove the installed
+     files again.
+
+Compilers and Options
+=====================
+
+   Some systems require unusual options for compilation or linking that
+the `configure' script does not know about.  Run `./configure --help'
+for details on some of the pertinent environment variables.
+
+   You can give `configure' initial values for configuration parameters
+by setting variables in the command line or in the environment.  Here
+is an example:
+
+     ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+   *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+   You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory.  To do this, you can use GNU `make'.  `cd' to the
+directory where you want the object files and executables to go and run
+the `configure' script.  `configure' automatically checks for the
+source code in the directory that `configure' is in and in `..'.
+
+   With a non-GNU `make', it is safer to compile the package for one
+architecture at a time in the source code directory.  After you have
+installed the package for one architecture, use `make distclean' before
+reconfiguring for another architecture.
+
+   On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple `-arch' options to the
+compiler but only a single `-arch' option to the preprocessor.  Like
+this:
+
+     ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CPP="gcc -E" CXXCPP="g++ -E"
+
+   This is not guaranteed to produce working output in all cases, you
+may have to build one architecture at a time and combine the results
+using the `lipo' tool if you have problems.
+
+Installation Names
+==================
+
+   By default, `make install' installs the package's commands under
+`/usr/local/bin', include files under `/usr/local/include', etc.  You
+can specify an installation prefix other than `/usr/local' by giving
+`configure' the option `--prefix=PREFIX'.
+
+   You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files.  If you
+pass the option `--exec-prefix=PREFIX' to `configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+   In addition, if you use an unusual directory layout you can give
+options like `--bindir=DIR' to specify different values for particular
+kinds of files.  Run `configure --help' for a list of the directories
+you can set and what kinds of files go in them.
+
+   If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving `configure' the
+option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
+
+Optional Features
+=================
+
+   Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System).  The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
+
+   For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
+
+Particular systems
+==================
+
+   On HP-UX, the default C compiler is not ANSI C compatible.  If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+     ./configure CC="cc -Ae"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+   On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file.  The option `-nodtk' can be used as
+a workaround.  If GNU CC is not installed, it is therefore recommended
+to try
+
+     ./configure CC="cc"
+
+and if that doesn't work, try
+
+     ./configure CC="cc -nodtk"
+
+Specifying the System Type
+==========================
+
+   There may be some features `configure' cannot figure out
+automatically, but needs to determine by the type of machine the package
+will run on.  Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option.  TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+     CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+     OS KERNEL-OS
+
+   See the file `config.sub' for the possible values of each field.  If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+   If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+   If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
+
+Sharing Defaults
+================
+
+   If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists.  Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
+
+Defining Variables
+==================
+
+   Variables not defined in a site shell script can be set in the
+environment passed to `configure'.  However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost.  In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'.  For example:
+
+     ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+Unfortunately, this technique does not work for `CONFIG_SHELL' due to
+an Autoconf bug.  Until the bug is fixed you can use this workaround:
+
+     CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+`configure' Invocation
+======================
+
+   `configure' recognizes the following options to control how it
+operates.
+
+`--help'
+`-h'
+     Print a summary of all of the options to `configure', and exit.
+
+`--help=short'
+`--help=recursive'
+     Print a summary of the options unique to this package's
+     `configure', and exit.  The `short' variant lists options used
+     only in the top level, while the `recursive' variant lists options
+     also present in any nested packages.
+
+`--version'
+`-V'
+     Print the version of Autoconf used to generate the `configure'
+     script, and exit.
+
+`--cache-file=FILE'
+     Enable the cache: use and save the results of the tests in FILE,
+     traditionally `config.cache'.  FILE defaults to `/dev/null' to
+     disable caching.
+
+`--config-cache'
+`-C'
+     Alias for `--cache-file=config.cache'.
+
+`--quiet'
+`--silent'
+`-q'
+     Do not print messages saying which checks are being made.  To
+     suppress all normal output, redirect it to `/dev/null' (any error
+     messages will still be shown).
+
+`--srcdir=DIR'
+     Look for the package's source code in directory DIR.  Usually
+     `configure' can determine that directory automatically.
+
+`--prefix=DIR'
+     Use DIR as the installation prefix.  *Note Installation Names::
+     for more details, including other options available for fine-tuning
+     the installation locations.
+
+`--no-create'
+`-n'
+     Run the configure checks, but stop before creating any output
+     files.
+
+`configure' also accepts some other, not widely useful, options.  Run
+`configure --help' for more details.
+

Added: hadoop/avro/trunk/src/c/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/Makefile.am?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/Makefile.am (added)
+++ hadoop/avro/trunk/src/c/Makefile.am Thu May 14 21:15:10 2009
@@ -0,0 +1,33 @@
+AM_CFLAGS=$(APR_CFLAGS) $(APR_INCLUDES) $(APU_INCLUDES) -Wall -pedantic
+
+include_HEADERS = avro.h
+
+lib_LTLIBRARIES = libavro.la
+libavro_la_SOURCES = avro_memory.c avro_socket.c avro_file.c \
+dump.c dump.h avro.c avro_string.c avro_zigzag.c error.c error.h avro_raw.c \
+avro_double.c
+
+check_PROGRAMS=test_avro_zigzag test_avro_string test_avro_bytes test_avro_raw \
+test_avro_float_double
+
+test_avro_zigzag_SOURCES=test_avro_zigzag.c
+test_avro_zigzag_LDADD=$(APR_LIBS) $(APU_LIBS) $(top_builddir)/libavro.la
+
+test_avro_string_SOURCES=test_avro_string.c
+test_avro_string_LDADD=$(APR_LIBS) $(APU_LIBS) $(top_builddir)/libavro.la
+
+test_avro_bytes_SOURCES=test_avro_bytes.c
+test_avro_bytes_LDADD=$(APR_LIBS) $(APU_LIBS) $(top_builddir)/libavro.la
+
+test_avro_raw_SOURCES=test_avro_raw.c
+test_avro_raw_LDADD=$(APR_LIBS) $(APU_LIBS) $(top_builddir)/libavro.la
+
+test_avro_float_double_SOURCES=test_avro_float_double.c
+test_avro_float_double_LDADD=$(APR_LIBS) $(APU_LIBS) $(top_builddir)/libavro.la
+
+TESTS=$(check_PROGRAMS)
+
+pretty:
+	indent *.c *.h
+
+CLEANFILES=*~

Added: hadoop/avro/trunk/src/c/NEWS
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/NEWS?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/NEWS (added)
+++ hadoop/avro/trunk/src/c/NEWS Thu May 14 21:15:10 2009
@@ -0,0 +1,5 @@
+
+For news, visit the Avro web site at
+http://hadoop.apache.org/avro/
+
+

Added: hadoop/avro/trunk/src/c/README
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/README?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/README (added)
+++ hadoop/avro/trunk/src/c/README Thu May 14 21:15:10 2009
@@ -0,0 +1,20 @@
+Avro C Bindings (libavro)
+
+This directory contains the C bindings for Avro.
+
+Currently, only the Avro primitive types are implemented, along with
+their corresponding unit tests.  The unit tests only check for proper
+encoding/decoding within the C library.  Interoperability tests are
+coming soon.
+
+Only the memory-backed Avro handle is working right now, although
+adding file and network I/O support will be easy thanks to the Apache
+Portable Runtime, on which the Avro C bindings depend.
+
+For API details, view the file avro.h.  The documentation is a work in
+progress.  The test cases (test_avro_*.c) also serve as great examples
+of how to use the API.
+
+libavro compiles and passes all tests on Linux.  On MacOS X, 4 out of
+5 unit tests pass.  Portability is a definite design goal, but
+currently the development emphasis is on Linux.
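
For reference, a minimal sketch of the API in the same spirit as the test
programs (assumes a working libavro build and the declarations in avro.h;
status checks are omitted for brevity):

    #include <stdio.h>
    #include <apr_pools.h>
    #include "avro.h"

    int
    main (void)
    {
      apr_pool_t *pool;
      AVRO encoder, decoder;
      char buf[64];
      int64_t in = 1234, out = 0;

      apr_initialize ();
      apr_pool_create (&pool, NULL);

      /* Encode the value into the memory buffer... */
      avro_create_memory (&encoder, pool, buf, sizeof (buf), AVRO_ENCODE);
      avro_int64 (&encoder, &in);

      /* ...then decode it back out of the same buffer. */
      avro_create_memory (&decoder, pool, buf, sizeof (buf), AVRO_DECODE);
      avro_int64 (&decoder, &out);

      printf ("in=%lld out=%lld\n", (long long) in, (long long) out);

      apr_pool_destroy (pool);
      apr_terminate ();
      return 0;
    }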

Added: hadoop/avro/trunk/src/c/avro.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro.c (added)
+++ hadoop/avro/trunk/src/c/avro.c Thu May 14 21:15:10 2009
@@ -0,0 +1,36 @@
+#include "avro.h"
+
+avro_status_t
+avro_null (void)
+{
+  /* Do nothing */
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_bool (AVRO * avro, bool_t * bp)
+{
+  avro_status_t status;
+  char b;
+  if (!avro || !bp)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      {
+	b = *bp ? 1 : 0;
+	return AVRO_PUTBYTES (avro, &b, 1);
+      }
+    case AVRO_DECODE:
+      {
+	status = AVRO_GETBYTES (avro, &b, 1);
+	CHECK_ERROR (status);
+	*bp = b ? 1 : 0;
+	break;			/* don't fall through to the failure case */
+      }
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}

Added: hadoop/avro/trunk/src/c/avro.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro.h?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro.h (added)
+++ hadoop/avro/trunk/src/c/avro.h Thu May 14 21:15:10 2009
@@ -0,0 +1,119 @@
+#ifndef AVRO_H
+#define AVRO_H
+#include <stdarg.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <apr_pools.h>
+#include <apr_file_io.h>
+#include <apr_network_io.h>
+
+/*! \mainpage Avro C Documentation
+*
+* \section intro_sec Introduction
+* 
+* This is the introduction.
+*
+* \section install_sec Installation
+*
+* This is the installation section.
+*
+*/
+
+/**
+Avro operation enum.
+Enum for discriminating whether an Avro handle is for encoding or decoding data.
+*/
+enum avro_op
+{
+  AVRO_ENCODE = 0, /**< Marks a handle as encoding Avro data */
+  AVRO_DECODE = 1  /**< Marks a handle as decoding Avro data */
+};
+typedef enum avro_op avro_op;
+
+/**
+Avro status enum.
+Enum used by Avro functions to return state.
+TODO: expand the number of states
+*/
+enum avro_status_t
+{
+  AVRO_OK = 0, /**< Function success */
+  AVRO_FAILURE = 1 /**< Function failure */
+};
+typedef enum avro_status_t avro_status_t;
+#define CHECK_ERROR(__status) if(__status != AVRO_OK){ return __status; }
+
+/**
+Avro handle.
+Opaque handle for encoding/decoding data to memory, file or network.
+@warning Never operate on an Avro handle directly
+*/
+struct AVRO
+{
+  enum avro_op a_op; /**< Holds the type of operation the handle is performing */
+  struct avro_ops
+  {
+    /**
+     * Function for getting bytes from the underlying media
+     */
+    avro_status_t (*a_getbytes) (struct AVRO * avro, caddr_t addr,
+				 int64_t len);
+    /**
+     * Function for sending bytes to the backing store
+     */
+    avro_status_t (*a_putbytes) (struct AVRO * avro, const char *addr,
+				 const int64_t len);
+  } *a_ops;
+  apr_pool_t *pool; /**< Pool used for allocating memory for dynamic data structures */
+
+  apr_file_t *file;
+  apr_socket_t *socket;
+  caddr_t addr;
+  int64_t len;
+  int64_t used;
+};
+typedef struct AVRO AVRO;
+
+#define AVRO_GETBYTES(avro, addr, len)     \
+(*(avro)->a_ops->a_getbytes)(avro, addr, len)
+
+#define AVRO_PUTBYTES(avro, addr, len)     \
+(*(avro)->a_ops->a_putbytes)(avro, addr, len)
+
+/** Create a memory-backed Avro handle 
+@param avro Pointer to handle that will be initialized
+@param pool Pool used for allocating dynamic data structures.
+@param addr Address of the memory location for manipulating data
+@param len Size of the memory to use 
+@param op The operation the handle should perform (e.g. encode or decode)
+@return The Avro status 
+*/
+avro_status_t avro_create_memory (AVRO * avro, apr_pool_t * pool,
+				  caddr_t addr, int64_t len, avro_op op);
+
+avro_status_t avro_create_file (AVRO * avro, apr_pool_t * pool,
+				apr_file_t * file, avro_op op);
+avro_status_t avro_create_socket (AVRO * avro, apr_pool_t * pool,
+				  apr_socket_t * socket, avro_op op);
+
+typedef avro_status_t (*avroproc_t) (AVRO, void *, ...);
+typedef int bool_t;
+
+avro_status_t avro_null (void);
+avro_status_t avro_int64 (AVRO * avro, int64_t * lp);
+avro_status_t avro_string (AVRO * avro, char **str, int64_t maxlen);
+avro_status_t avro_bytes (AVRO * avro, char **bytes, int64_t * len,
+			  int64_t maxlen);
+avro_status_t avro_bool (AVRO * avro, bool_t * bp);
+avro_status_t avro_float (AVRO * avro, float *fp);
+avro_status_t avro_double (AVRO * avro, double *dp);
+
+/* Useful for debugging */
+void avro_dump_memory (AVRO * avro, FILE * fp);
+
+avro_status_t avro_getint32_raw (AVRO * avro, int32_t * value);
+avro_status_t avro_putint32_raw (AVRO * avro, const int32_t value);
+avro_status_t avro_getint64_raw (AVRO * avro, int64_t * value);
+avro_status_t avro_putint64_raw (AVRO * avro, const int64_t value);
+
+#endif

Added: hadoop/avro/trunk/src/c/avro_double.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_double.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_double.c (added)
+++ hadoop/avro/trunk/src/c/avro_double.c Thu May 14 21:15:10 2009
@@ -0,0 +1,39 @@
+#include "avro.h"
+
+avro_status_t
+avro_float (AVRO * avro, float *fp)
+{
+  if (!avro || !fp)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      return avro_putint32_raw (avro, *(int32_t *) fp);
+    case AVRO_DECODE:
+      return avro_getint32_raw (avro, (int32_t *) fp);
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_double (AVRO * avro, double *dp)
+{
+  if (!avro || !dp)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      return avro_putint64_raw (avro, *(int64_t *) dp);
+    case AVRO_DECODE:
+      return avro_getint64_raw (avro, (int64_t *) dp);
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}

Added: hadoop/avro/trunk/src/c/avro_file.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_file.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_file.c (added)
+++ hadoop/avro/trunk/src/c/avro_file.c Thu May 14 21:15:10 2009
@@ -0,0 +1,5 @@
+
+
+
+
+

Added: hadoop/avro/trunk/src/c/avro_memory.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_memory.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_memory.c (added)
+++ hadoop/avro/trunk/src/c/avro_memory.c Thu May 14 21:15:10 2009
@@ -0,0 +1,61 @@
+#include "avro.h"
+#include <string.h>
+#include "dump.h"
+
+static avro_status_t
+memory_get_bytes (struct AVRO *avro, caddr_t addr, const int64_t len)
+{
+  if ((avro->len - avro->used) < len || len < 0)
+    {
+      return AVRO_FAILURE;
+    }
+  if (len > 0)
+    {
+      memcpy (addr, avro->addr + avro->used, len);
+      avro->used += len;
+    }
+  return AVRO_OK;
+}
+
+static avro_status_t
+memory_put_bytes (struct AVRO *avro, const char *addr, const int64_t len)
+{
+  if ((avro->len - avro->used) < len || len < 0)
+    {
+      return AVRO_FAILURE;
+    }
+  if (len > 0)
+    {
+      memcpy (avro->addr + avro->used, addr, len);
+      avro->used += len;
+    }
+  return AVRO_OK;
+}
+
+static const struct avro_ops avro_memory_ops = {
+  memory_get_bytes,
+  memory_put_bytes
+};
+
+avro_status_t
+avro_create_memory (AVRO * avro, apr_pool_t * pool, caddr_t addr, int64_t len,
+		    avro_op op)
+{
+  if (!avro || !pool || !addr || len <= 0)
+    {
+      return AVRO_FAILURE;
+    }
+  avro->pool = pool;
+  avro->a_op = op;
+  avro->a_ops = (struct avro_ops *) &avro_memory_ops;
+  avro->addr = addr;
+  avro->len = len;
+  avro->used = 0;
+  return AVRO_OK;
+}
+
+void
+avro_dump_memory (AVRO * avro, FILE * fp)
+{
+  dump (fp, avro->addr, avro->used);
+}

Added: hadoop/avro/trunk/src/c/avro_raw.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_raw.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_raw.c (added)
+++ hadoop/avro/trunk/src/c/avro_raw.c Thu May 14 21:15:10 2009
@@ -0,0 +1,57 @@
+#include "avro.h"
+
+avro_status_t
+avro_getint32_raw (AVRO * avro, int32_t * value)
+{
+  avro_status_t status;
+  uint8_t buf[4];
+  status = AVRO_GETBYTES (avro, (char *) buf, sizeof (buf));
+  CHECK_ERROR (status);
+  *value = ((int32_t) buf[0] << 0)
+    | ((int32_t) buf[1] << 8)
+    | ((int32_t) buf[2] << 16) | ((int32_t) buf[3] << 24);
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_putint32_raw (AVRO * avro, const int32_t value)
+{
+  uint8_t buf[4];
+  buf[0] = (uint8_t) (value >> 0);
+  buf[1] = (uint8_t) (value >> 8);
+  buf[2] = (uint8_t) (value >> 16);
+  buf[3] = (uint8_t) (value >> 24);
+  return AVRO_PUTBYTES (avro, (char *) buf, sizeof (buf));
+}
+
+avro_status_t
+avro_getint64_raw (AVRO * avro, int64_t * value)
+{
+  avro_status_t status;
+  uint8_t buf[8];
+  status = AVRO_GETBYTES (avro, (char *) buf, sizeof (buf));
+  CHECK_ERROR (status);
+  *value = ((int64_t) buf[0] << 0)
+    | ((int64_t) buf[1] << 8)
+    | ((int64_t) buf[2] << 16)
+    | ((int64_t) buf[3] << 24)
+    | ((int64_t) buf[4] << 32)
+    | ((int64_t) buf[5] << 40)
+    | ((int64_t) buf[6] << 48) | ((int64_t) buf[7] << 56);
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_putint64_raw (AVRO * avro, const int64_t value)
+{
+  uint8_t buf[8];
+  buf[0] = (uint8_t) (value >> 0);
+  buf[1] = (uint8_t) (value >> 8);
+  buf[2] = (uint8_t) (value >> 16);
+  buf[3] = (uint8_t) (value >> 24);
+  buf[4] = (uint8_t) (value >> 32);
+  buf[5] = (uint8_t) (value >> 40);
+  buf[6] = (uint8_t) (value >> 48);
+  buf[7] = (uint8_t) (value >> 56);
+  return AVRO_PUTBYTES (avro, (char *) buf, sizeof (buf));
+}
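
The raw routines above lay the value down least-significant byte first.
A small sketch of the resulting byte order, assuming the memory-backed
handle from avro_memory.c (0x12345678 should dump as 78 56 34 12):

    #include <stdio.h>
    #include <apr_pools.h>
    #include "avro.h"

    int
    main (void)
    {
      apr_pool_t *pool;
      AVRO avro;
      char buf[4];

      apr_initialize ();
      apr_pool_create (&pool, NULL);
      avro_create_memory (&avro, pool, buf, sizeof (buf), AVRO_ENCODE);

      /* Little-endian layout: buf becomes 78 56 34 12. */
      avro_putint32_raw (&avro, 0x12345678);
      avro_dump_memory (&avro, stdout);

      apr_pool_destroy (pool);
      apr_terminate ();
      return 0;
    }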

Added: hadoop/avro/trunk/src/c/avro_socket.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_socket.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_socket.c (added)
+++ hadoop/avro/trunk/src/c/avro_socket.c Thu May 14 21:15:10 2009
@@ -0,0 +1,6 @@
+
+
+
+
+
+

Added: hadoop/avro/trunk/src/c/avro_string.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_string.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_string.c (added)
+++ hadoop/avro/trunk/src/c/avro_string.c Thu May 14 21:15:10 2009
@@ -0,0 +1,74 @@
+#include "avro.h"
+
+static avro_status_t
+avro_string_bytes_encode (AVRO * avro, char **str, int64_t * len,
+			  int64_t maxlen)
+{
+  avro_status_t status = avro_int64 (avro, len);
+  CHECK_ERROR (status);
+  return AVRO_PUTBYTES (avro, *str, *len);
+}
+
+static avro_status_t
+avro_string_bytes_decode (AVRO * avro, char **str, int64_t * len,
+			  int null_terminated, int64_t maxlen)
+{
+  avro_status_t status = avro_int64 (avro, len);
+  CHECK_ERROR (status);
+  if (*len < 0)
+    {
+      return AVRO_FAILURE;
+    }
+  *str = (caddr_t) apr_palloc (avro->pool, *len + null_terminated);
+  if (*str == NULL)
+    {
+      return AVRO_FAILURE;
+    }
+  status = AVRO_GETBYTES (avro, *str, *len);
+  CHECK_ERROR (status);
+  if (null_terminated)
+    {
+      (*str)[*len] = '\0';
+    }
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_string (AVRO * avro, char **str, int64_t maxlen)
+{
+  int64_t len;
+  if (!avro || !str)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      len = strlen (*str);
+      return avro_string_bytes_encode (avro, str, &len, maxlen);
+    case AVRO_DECODE:
+      return avro_string_bytes_decode (avro, str, &len, 1, maxlen);
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}
+
+avro_status_t
+avro_bytes (AVRO * avro, char **bytes, int64_t * len, int64_t maxlen)
+{
+  if (!avro || !bytes || !len || *len < 0)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      return avro_string_bytes_encode (avro, bytes, len, maxlen);
+    case AVRO_DECODE:
+      return avro_string_bytes_decode (avro, bytes, len, 0, maxlen);
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}

Added: hadoop/avro/trunk/src/c/avro_zigzag.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro_zigzag.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/avro_zigzag.c (added)
+++ hadoop/avro/trunk/src/c/avro_zigzag.c Thu May 14 21:15:10 2009
@@ -0,0 +1,72 @@
+#include "avro.h"
+
+/* TODO: add avro_int32 */
+
+static avro_status_t
+avro_int64_encode (AVRO * avro, int64_t * lp)
+{
+  avro_status_t status;
+  int64_t n = *lp;
+  uint8_t b;
+
+  /* move sign to low-order bit */
+  n = (n << 1) ^ (n >> 63);
+  while ((n & ~0x7F) != 0)
+    {
+      b = ((((uint8_t) n) & 0x7F) | 0x80);
+      status = AVRO_PUTBYTES (avro, (caddr_t) & b, 1);
+      CHECK_ERROR (status);
+      n >>= 7;
+    }
+  b = (uint8_t) n;
+  return AVRO_PUTBYTES (avro, (caddr_t) & b, 1);
+}
+
+static avro_status_t
+avro_int64_decode (AVRO * avro, int64_t * lp)
+{
+  avro_status_t status;
+  int64_t value = 0;
+  int offset = 0;
+  uint8_t b;
+  const int MAX_VARINT_BUF_SIZE = 10;
+  do
+    {
+      if (offset == MAX_VARINT_BUF_SIZE)
+	{
+	  return AVRO_FAILURE;
+	}
+      status = AVRO_GETBYTES (avro, &b, 1);
+      CHECK_ERROR (status);
+      value |= (int64_t) (b & 0x7F) << (7 * offset);
+      ++offset;
+    }
+  while (b & 0x80);
+  /* back to two's-complement value; */
+  *lp = (value >> 1) ^ -(value & 1);
+  return AVRO_OK;
+}
+
+/**
+* Function for encoding/decoding 64-bit signed integers
+* @param avro An initialized avro handle
+* @param lp Pointer to a 64-bit signed integer
+*/
+avro_status_t
+avro_int64 (AVRO * avro, int64_t * lp)
+{
+  if (!avro || !lp)
+    {
+      return AVRO_FAILURE;
+    }
+  switch (avro->a_op)
+    {
+    case AVRO_ENCODE:
+      return avro_int64_encode (avro, lp);
+    case AVRO_DECODE:
+      return avro_int64_decode (avro, lp);
+    default:
+      return AVRO_FAILURE;
+    }
+  return AVRO_OK;
+}
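
The zigzag step maps small-magnitude signed values to small unsigned ones
(0, -1, 1, -2, 2, ... become 0, 1, 2, 3, 4, ...), so they need few varint
bytes.  A small sketch showing the wire bytes, assuming the memory-backed
handle (-1 should encode as the single byte 01, and 64 as 80 01):

    #include <stdio.h>
    #include <apr_pools.h>
    #include "avro.h"

    int
    main (void)
    {
      apr_pool_t *pool;
      AVRO avro;
      char buf[32];
      int64_t values[] = { 0, -1, 1, -2, 2, 64 };
      int i;

      apr_initialize ();
      apr_pool_create (&pool, NULL);
      avro_create_memory (&avro, pool, buf, sizeof (buf), AVRO_ENCODE);

      for (i = 0; i < 6; i++)
        {
          avro_int64 (&avro, &values[i]);
        }

      /* Expected dump: 00 01 02 03 04 80 01 (one varint per value). */
      avro_dump_memory (&avro, stdout);

      apr_pool_destroy (pool);
      apr_terminate ();
      return 0;
    }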

Added: hadoop/avro/trunk/src/c/configure.in
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/configure.in?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/configure.in (added)
+++ hadoop/avro/trunk/src/c/configure.in Thu May 14 21:15:10 2009
@@ -0,0 +1,83 @@
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+AC_PREREQ(2.59)
+AC_INIT([avro-c], [0.1.0], [matt@cloudera.com])
+AC_CONFIG_AUX_DIR([config])
+AM_INIT_AUTOMAKE
+AC_CONFIG_SRCDIR([avro.h])
+AC_CONFIG_HEADER([config.h])
+
+# Checks for programs.
+AC_PROG_CC
+AC_PROG_LIBTOOL
+
+# Checks for libraries.
+AC_MSG_CHECKING(for apr-1-config)
+AC_PATH_PROG(APR_CONFIG, apr-1-config)
+if test -z "$APR_CONFIG"; then
+  AC_MSG_ERROR(Please make sure that you have the Apache Portable Runtime 'apr' installed 
+  and the apr-1-config binary is on your path.)
+else
+  AC_MSG_RESULT(Found $APR_CONFIG for version `$APR_CONFIG --version`) 
+fi
+APR_CFLAGS=$($APR_CONFIG --cflags --cppflags)
+APR_INCLUDES=$($APR_CONFIG --includes)
+APR_LIBS=$($APR_CONFIG --link-libtool --libs)
+AC_SUBST(APR_CFLAGS)
+AC_SUBST(APR_INCLUDES)
+AC_SUBST(APR_LIBS)
+AC_MSG_CHECKING(for APR cflags)
+AC_MSG_RESULT($APR_CFLAGS)
+AC_MSG_CHECKING(for APR includes)
+AC_MSG_RESULT($APR_INCLUDES)
+AC_MSG_CHECKING(for APR libraries)
+AC_MSG_RESULT($APR_LIBS)
+
+AC_MSG_CHECKING(for apu-1-config)
+AC_PATH_PROG(APR_UTIL_CONFIG, apu-1-config)
+if test -z "$APR_UTIL_CONFIG"; then
+  AC_MSG_ERROR(Please make sure that you have the Apache Portable Runtime Utility 'apr-util'
+  installed and the apu-1-config script is on your path.)
+else
+  AC_MSG_RESULT(Found $APR_UTIL_CONFIG for version `$APR_UTIL_CONFIG --version`)
+fi
+#APU_CFLAGS=$($APR_UTIL_CONFIG --cflags --cppflags)
+APU_INCLUDES=$($APR_UTIL_CONFIG --includes)
+APU_LIBS=$($APR_UTIL_CONFIG --link-libtool --libs)
+#AC_SUBST(APU_CFLAGS)
+AC_SUBST(APU_INCLUDES)
+AC_SUBST(APU_LIBS)
+#AC_MSG_CHECKING(for APR util cflags)
+#AC_MSG_RESULT($APU_CFLAGS)
+AC_MSG_CHECKING(for APR util includes)
+AC_MSG_RESULT($APU_INCLUDES)
+AC_MSG_CHECKING(for APR util libraries)
+AC_MSG_RESULT($APU_LIBS)
+
+
+# Checks for header files.
+AC_HEADER_STDC
+AC_CHECK_HEADERS([libintl.h limits.h netinet/in.h stdint.h stdlib.h string.h sys/param.h sys/time.h unistd.h wchar.h])
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_C_CONST
+AC_TYPE_SIZE_T
+
+# Checks for library functions.
+AC_FUNC_MALLOC
+AC_CHECK_FUNCS([bzero])
+
+AC_CONFIG_FILES([Makefile])
+AC_OUTPUT
+
+echo
+echo "C bindings for"
+echo "    _                  "
+echo "   / \__   ___ __ ___  "
+echo "  / _ \ \ / / '__/ _ \ "
+echo " / ___ \ V /| | | (_) |"
+echo "/_/   \_\_/ |_|  \___/ "
+echo
+echo "         Version: $VERSION"
+echo

Added: hadoop/avro/trunk/src/c/dump.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/dump.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/dump.c (added)
+++ hadoop/avro/trunk/src/c/dump.c Thu May 14 21:15:10 2009
@@ -0,0 +1,51 @@
+#include "dump.h"
+#include <ctype.h>
+#include <string.h>
+#include <stdint.h>
+
+static void
+dump_line (FILE * out, const caddr_t addr, const long len)
+{
+  int i;
+  fprintf (out, "|");
+  for (i = 0; i < 16; i++)
+    {
+      if (i < len)
+	{
+	  fprintf (out, " %02X", ((uint8_t *) addr)[i]);
+	}
+      else
+	{
+	  fprintf (out, " ..");
+	}
+      if (!((i + 1) % 8))
+	{
+	  fprintf (out, " |");
+	}
+    }
+  fprintf (out, "\t");
+  for (i = 0; i < 16; i++)
+    {
+      char c = (i < len) ? (0x7f & ((uint8_t *) addr)[i]) : 0;
+      if (i < len && isprint (c))
+	{
+	  fprintf (out, "%c", c);
+	}
+      else
+	{
+	  fprintf (out, ".");
+	}
+    }
+}
+
+void
+dump (FILE * out, const caddr_t addr, const long len)
+{
+  int i;
+  for (i = 0; i < len; i += 16)
+    {
+      dump_line (out, addr + i, (len - i) < 16 ? (len - i) : 16);
+      fprintf (out, "\n");
+    }
+  fflush (out);
+}

Added: hadoop/avro/trunk/src/c/dump.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/dump.h?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/dump.h (added)
+++ hadoop/avro/trunk/src/c/dump.h Thu May 14 21:15:10 2009
@@ -0,0 +1,9 @@
+#ifndef DUMP_H
+#define DUMP_H
+
+#include <stdio.h>
+#include <sys/types.h>
+
+void dump (FILE * out, const caddr_t addr, const long len);
+
+#endif

Added: hadoop/avro/trunk/src/c/error.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/error.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/error.c (added)
+++ hadoop/avro/trunk/src/c/error.c Thu May 14 21:15:10 2009
@@ -0,0 +1,111 @@
+#include <stdio.h>
+#include <stdarg.h>		/* ANSI C header file */
+#include <errno.h>
+#include <string.h>
+#include <stdlib.h>
+#include <syslog.h>		/* for syslog() */
+#include "error.h"
+
+int daemon_proc = 0;		/* set nonzero by daemon_init() */
+
+static void err_doit (int, int, const char *, va_list);
+
+/* Nonfatal error related to a system call.
+ * Print a message and return. */
+
+void
+err_ret (const char *fmt, ...)
+{
+  va_list ap;
+
+  va_start (ap, fmt);
+  err_doit (1, LOG_INFO, fmt, ap);
+  va_end (ap);
+  return;
+}
+
+/* Fatal error related to a system call.
+ * Print a message and terminate. */
+
+void
+err_sys (const char *fmt, ...)
+{
+  va_list ap;
+
+  va_start (ap, fmt);
+  err_doit (1, LOG_ERR, fmt, ap);
+  va_end (ap);
+  exit (EXIT_FAILURE);
+}
+
+/* Fatal error related to a system call.
+ * Print a message, dump core, and terminate. */
+
+void
+err_dump (const char *fmt, ...)
+{
+  va_list ap;
+
+  va_start (ap, fmt);
+  err_doit (1, LOG_ERR, fmt, ap);
+  va_end (ap);
+  abort ();			/* dump core and terminate */
+  exit (EXIT_FAILURE);		/* shouldn't get here */
+}
+
+/* Nonfatal error unrelated to a system call.
+ * Print a message and return. */
+
+void
+err_msg (const char *fmt, ...)
+{
+  va_list ap;
+
+  va_start (ap, fmt);
+  err_doit (0, LOG_INFO, fmt, ap);
+  va_end (ap);
+  return;
+}
+
+/* Fatal error unrelated to a system call.
+ * Print a message and terminate. */
+
+void
+err_quit (const char *fmt, ...)
+{
+  va_list ap;
+
+  va_start (ap, fmt);
+  err_doit (0, LOG_ERR, fmt, ap);
+  va_end (ap);
+  exit (EXIT_FAILURE);
+}
+
+/* Print a message and return to caller.
+ * Caller specifies "errnoflag" and "level". */
+
+static void
+err_doit (int errnoflag, int level, const char *fmt, va_list ap)
+{
+  int errno_save, n;
+  char buf[1024];
+
+  errno_save = errno;		/* value caller might want printed */
+  vsnprintf (buf, sizeof (buf), fmt, ap);	/* this is safe */
+  n = strlen (buf);
+  if (errnoflag)
+    snprintf (buf + n, sizeof (buf) - n, ": %s", strerror (errno_save));
+  strncat (buf, "\n", sizeof (buf) - strlen (buf) - 1);
+
+  if (daemon_proc)
+    {
+      syslog (level, "%s", buf);
+    }
+  else
+    {
+      fflush (stdout);		/* in case stdout and stderr are the same */
+      fputs (buf, stderr);
+      fflush (stderr);
+    }
+  return;
+}

Added: hadoop/avro/trunk/src/c/error.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/error.h?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/error.h (added)
+++ hadoop/avro/trunk/src/c/error.h Thu May 14 21:15:10 2009
@@ -0,0 +1,29 @@
+#ifndef ERROR_H
+#define ERROR_H
+
+/* Nonfatal error related to a system call.
+ * Print a message and return. */
+
+void err_ret (const char *fmt, ...);
+
+/* Fatal error related to a system call.
+ * Print a message and terminate. */
+
+void err_sys (const char *fmt, ...);
+
+/* Fatal error related to a system call.
+ * Print a message, dump core, and terminate. */
+
+void err_dump (const char *fmt, ...);
+
+/* Nonfatal error unrelated to a system call.
+ * Print a message and return. */
+
+void err_msg (const char *fmt, ...);
+
+/* Fatal error unrelated to a system call.
+ * Print a message and terminate. */
+
+void err_quit (const char *fmt, ...);
+
+#endif

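These declarations appear to follow the familiar error-reporting helpers popularized by Stevens' UNIX Network Programming: the _ret/_sys/_dump variants append strerror(errno) to the message, and output goes to stderr unless daemon_proc is set, in which case it goes to syslog. A minimal usage sketch (the path below is illustrative):

#include <stdio.h>
#include "error.h"

int
main (void)
{
  FILE *fp = fopen ("/no/such/file", "r");
  if (fp == NULL)
    {
      /* Prints "Unable to open /no/such/file: <strerror(errno)>" and exits. */
      err_sys ("Unable to open %s", "/no/such/file");
    }
  fclose (fp);
  return 0;
}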
Added: hadoop/avro/trunk/src/c/test_avro_bytes.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_bytes.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_bytes.c (added)
+++ hadoop/avro/trunk/src/c/test_avro_bytes.c Thu May 14 21:15:10 2009
@@ -0,0 +1,76 @@
+#include <apr.h>
+#include <apr_pools.h>
+#include <apr_buckets.h>
+#include <apr_file_io.h>
+#include <stdlib.h>
+#include <time.h>
+#include "avro.h"
+#include "error.h"
+
+int
+main (void)
+{
+  apr_pool_t *pool;
+  AVRO avro_in, avro_out;
+  avro_status_t avro_status;
+  char buf[1024];
+  long long_in, long_out;
+  char *bytes_in, *bytes_out;
+  int i;
+  int64_t len_in, len_out;
+
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  srand (time (NULL));
+
+  for (i = 0; i < 10; i++)
+    {
+      apr_pool_create (&pool, NULL);
+      avro_status =
+	avro_create_memory (&avro_in, pool, buf, sizeof (buf), AVRO_ENCODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO encoder");
+	}
+
+      long_in = rand ();
+      bytes_in = (char *) &long_in;
+      len_in = sizeof (long_in);
+      avro_status = avro_bytes (&avro_in, &bytes_in, &len_in, -1);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode bytes value=%s", long_in);
+	}
+
+      avro_status =
+	avro_create_memory (&avro_out, pool, buf, sizeof (buf), AVRO_DECODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO decoder");
+	}
+
+      avro_status = avro_bytes (&avro_out, &bytes_out, &len_out, -1);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO long");
+	}
+
+      if (len_out != len_in)
+	{
+	  err_quit ("Error decoding bytes out len=%d != in len=%d", len_out,
+		    len_in);
+	}
+      long_out = *((long *) bytes_out);
+      if (long_out != long_in)
+	{
+	  avro_dump_memory (&avro_in, stderr);
+	  avro_dump_memory (&avro_out, stderr);
+	  err_quit ("Error decoding bytes long_in=%d != long_out = %d",
+		    long_in, long_out);
+	}
+      apr_pool_destroy (pool);
+    }
+
+  return EXIT_SUCCESS;
+}

Added: hadoop/avro/trunk/src/c/test_avro_float_double.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_float_double.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_float_double.c (added)
+++ hadoop/avro/trunk/src/c/test_avro_float_double.c Thu May 14 21:15:10 2009
@@ -0,0 +1,84 @@
+#include <apr.h>
+#include <apr_pools.h>
+#include <apr_buckets.h>
+#include <apr_file_io.h>
+#include <stdlib.h>
+#include <time.h>
+#include "avro.h"
+#include "error.h"
+
+int
+main (void)
+{
+  apr_pool_t *pool;
+  AVRO avro_in, avro_out;
+  avro_status_t avro_status;
+  char buf[1024];
+  float f_in, f_out;
+  double d_in, d_out;
+  int i;
+
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  srand (time (NULL));
+
+  for (i = 0; i < 10; i++)
+    {
+      apr_pool_create (&pool, NULL);
+      avro_status =
+	avro_create_memory (&avro_in, pool, buf, sizeof (buf), AVRO_ENCODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO encoder");
+	}
+
+      f_in = rand () + (rand () / (RAND_MAX + 1.0));
+      d_in = f_in * f_in;
+
+      avro_status = avro_float (&avro_in, &f_in);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode float value=%f", f_in);
+	}
+
+      avro_status = avro_double (&avro_in, &d_in);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode double value=%f", d_in);
+	}
+
+      avro_status =
+	avro_create_memory (&avro_out, pool, buf, sizeof (buf), AVRO_DECODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO decoder");
+	}
+
+      avro_status = avro_float (&avro_out, &f_out);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO float");
+	}
+
+      avro_status = avro_double (&avro_out, &d_out);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO double");
+	}
+
+      if (f_in != f_out)
+	{
+	  avro_dump_memory (&avro_in, stderr);
+	  avro_dump_memory (&avro_out, stderr);
+	  err_quit ("Error encoding decoding float %f != %f", f_in, f_out);
+	}
+      if (d_in != d_out)
+	{
+	  err_quit ("Error encoding decoding double %f != %f", d_in, d_out);
+	}
+      apr_pool_destroy (pool);
+    }
+
+  return EXIT_SUCCESS;
+}

Added: hadoop/avro/trunk/src/c/test_avro_raw.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_raw.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_raw.c (added)
+++ hadoop/avro/trunk/src/c/test_avro_raw.c Thu May 14 21:15:10 2009
@@ -0,0 +1,86 @@
+#include <apr.h>
+#include <apr_pools.h>
+#include <apr_buckets.h>
+#include <apr_file_io.h>
+#include <stdlib.h>
+#include <time.h>
+#include "avro.h"
+#include "error.h"
+
+int
+main (void)
+{
+  apr_pool_t *pool;
+  AVRO avro_in, avro_out;
+  avro_status_t avro_status;
+  char buf[1024];
+  int32_t i32_in, i32_out;
+  int64_t i64_in, i64_out;
+  int i;
+
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  srand (time (NULL));
+
+  for (i = 0; i < 10; i++)
+    {
+      apr_pool_create (&pool, NULL);
+      avro_status =
+	avro_create_memory (&avro_in, pool, buf, sizeof (buf), AVRO_ENCODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO encoder");
+	}
+
+      i32_in = rand ();
+      i64_in = (int64_t) i32_in * i32_in;
+
+      avro_status = avro_putint32_raw (&avro_in, i32_in);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode raw int32 value=%d", i32_in);
+	}
+
+      avro_status = avro_putint64_raw (&avro_in, i64_in);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode raw int64 value=%lld", i64_in);
+	}
+
+      avro_status =
+	avro_create_memory (&avro_out, pool, buf, sizeof (buf), AVRO_DECODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO decoder");
+	}
+
+      avro_status = avro_getint32_raw (&avro_out, &i32_out);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO raw int32");
+	}
+
+      avro_status = avro_getint64_raw (&avro_out, &i64_out);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO raw in64");
+	}
+
+      if (i32_in != i32_out)
+	{
+	  avro_dump_memory (&avro_in, stderr);
+	  avro_dump_memory (&avro_out, stderr);
+	  err_quit ("Error encoding decoding raw 32-bit int %d != %d", i32_in,
+		    i32_out);
+	}
+      if (i64_in != i64_out)
+	{
+	  err_quit ("Error encoding decoding raw 64-bit int %lld != %lld",
+		    i64_in, i64_out);
+	}
+      apr_pool_destroy (pool);
+    }
+
+  return EXIT_SUCCESS;
+}

Added: hadoop/avro/trunk/src/c/test_avro_string.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_string.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_string.c (added)
+++ hadoop/avro/trunk/src/c/test_avro_string.c Thu May 14 21:15:10 2009
@@ -0,0 +1,75 @@
+#include <apr.h>
+#include <apr_pools.h>
+#include <apr_buckets.h>
+#include <apr_file_io.h>
+#include <stdlib.h>
+#include <string.h>
+#include "avro.h"
+#include "error.h"
+
+int
+main (void)
+{
+  apr_pool_t *pool;
+  AVRO avro_in, avro_out;
+  avro_status_t avro_status;
+  char buf[1024];
+  char *str_in, *str_out;
+  int i, len;
+  char *test_strings[] = {
+    "This",
+    "Is",
+    "A",
+    "Test"
+  };
+
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  for (i = 0; i < sizeof (test_strings) / sizeof (test_strings[0]); i++)
+    {
+      char *test_string = test_strings[i];
+
+      apr_pool_create (&pool, NULL);
+      avro_status =
+	avro_create_memory (&avro_in, pool, buf, sizeof (buf), AVRO_ENCODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO encoder");
+	}
+
+      str_in = test_string;
+      avro_status = avro_string (&avro_in, &str_in, -1);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode string value=%s", str_in);
+	}
+
+      avro_status =
+	avro_create_memory (&avro_out, pool, buf, sizeof (buf), AVRO_DECODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO decoder");
+	}
+
+      avro_status = avro_string (&avro_out, &str_out, -1);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO long");
+	}
+
+      len = strlen (str_in);
+      if (len != strlen (str_out))
+	{
+	  err_quit ("Error decoding string");
+	}
+      if (memcmp (str_in, str_out, len))
+	{
+	  err_quit ("Error decoding string");
+	}
+      apr_pool_destroy (pool);
+
+    }
+
+  return EXIT_SUCCESS;
+}

Added: hadoop/avro/trunk/src/c/test_avro_zigzag.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_zigzag.c?rev=774931&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_zigzag.c (added)
+++ hadoop/avro/trunk/src/c/test_avro_zigzag.c Thu May 14 21:15:10 2009
@@ -0,0 +1,97 @@
+#include <apr.h>
+#include <apr_pools.h>
+#include <apr_buckets.h>
+#include <apr_file_io.h>
+#include <stdlib.h>
+#include "avro.h"
+#include "error.h"
+
+struct test_case
+{
+  int64_t value;
+  unsigned len;
+  uint8_t bytes[16];
+};
+typedef struct test_case test_case;
+
+static const test_case test_cases[] = {
+  {0, 1, {0x0}},
+  {-1, 1, {0x1}},
+  {1, 1, {0x2}},
+  {-2, 1, {0x3}},
+  {2, 1, {0x4}},
+  {-64, 1, {0x7f}},
+  {64, 2, {0x80, 0x01}},
+  {-65, 2, {0x81, 0x01}},
+  {65, 2, {0x82, 0x01}}
+};
+
+int
+main (void)
+{
+  apr_pool_t *pool;
+  AVRO avro_in, avro_out;
+  avro_status_t avro_status;
+  char buf[1024];
+  int64_t value_in, value_out;
+  int i, j;
+
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  apr_pool_create (&pool, NULL);
+
+  for (i = 0; i < sizeof (test_cases) / sizeof (test_cases[0]); i++)
+    {
+      const test_case *tc = test_cases + i;
+      avro_status =
+	avro_create_memory (&avro_in, pool, buf, sizeof (buf), AVRO_ENCODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO encoder");
+	}
+
+      value_in = (int64_t) tc->value;
+      avro_status = avro_int64 (&avro_in, &value_in);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to encode long value=%lld", value_in);
+	}
+
+      if (avro_in.used != tc->len)
+	{
+	  err_quit ("Long value=%lld encoded to the wrong length %d != %d",
+		    value_in, avro_in.used, tc->len);
+	}
+      for (j = 0; j < tc->len; j++)
+	{
+	  if ((uint8_t) avro_in.addr[j] != tc->bytes[j])
+	    {
+	      err_quit ("Invalid byte %d encoding the value=%lld", j,
+			value_in);
+	    }
+	}
+
+      avro_status =
+	avro_create_memory (&avro_out, pool, buf, sizeof (buf), AVRO_DECODE);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to create AVRO decoder");
+	}
+
+      avro_status = avro_int64 (&avro_out, &value_out);
+      if (avro_status != AVRO_OK)
+	{
+	  err_quit ("Unable to decode AVRO long");
+	}
+
+      if (value_out != value_in)
+	{
+	  err_msg ("Decoder error %lld decoded as %lld", value_in, value_out);
+	}
+
+    }
+
+  apr_pool_destroy (pool);
+  return EXIT_SUCCESS;
+}
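
The vectors in test_cases match the standard Avro zigzag varint encoding: a signed value n maps to (n << 1) ^ (n >> 63), which is then written 7 bits at a time, least-significant group first, with the high bit set on every byte except the last. A standalone sketch of that mapping, independent of the AVRO API exercised above (it assumes the usual arithmetic right shift for signed values):

#include <stdint.h>
#include <stdio.h>

/* Zigzag-map a signed 64-bit value and write it as a variable-length
 * integer into buf (at most 10 bytes).  Returns the encoded length. */
static unsigned
zigzag_encode (int64_t value, uint8_t * buf)
{
  /* (n << 1) ^ (n >> 63); assumes arithmetic right shift of signed values. */
  uint64_t n = ((uint64_t) value << 1) ^ (uint64_t) (value >> 63);
  unsigned len = 0;

  while (n & ~(uint64_t) 0x7F)
    {
      buf[len++] = (uint8_t) ((n & 0x7F) | 0x80);
      n >>= 7;
    }
  buf[len++] = (uint8_t) n;
  return len;
}

int
main (void)
{
  uint8_t buf[10];
  unsigned len = zigzag_encode (-64, buf);

  /* Expect a single byte 0x7f, matching the {-64, 1, {0x7f}} case above. */
  printf ("len=%u first=0x%02x\n", len, buf[0]);
  return 0;
}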