You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/05/04 04:04:07 UTC

svn commit: r399509 [3/5] - in /lucene/hadoop/trunk: ./ bin/ src/c++/ src/c++/librecordio/ src/c++/librecordio/test/ src/java/org/apache/hadoop/record/ src/java/org/apache/hadoop/record/compiler/ src/java/org/apache/hadoop/record/compiler/generated/ sr...

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFile.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFile.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFile.java Wed May  3 19:04:01 2006
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+/**
+ *
+ * @author milindb@yahoo-inc.com
+ */
+public class JFile {
+    
+    /** Path of the DDL file being compiled. */
+    private String mName;
+    /** Files included by this DDL file. */
+    private ArrayList mInclFiles;
+    /** Records declared in this DDL file. */
+    private ArrayList mRecords;
+    
+    /** Creates a new instance of JFile.
+     * @param name path of the DDL file
+     * @param inclFiles list of files included by this file
+     * @param recList list of records declared in this file
+     */
+    public JFile(String name, ArrayList inclFiles, ArrayList recList) {
+        mName = name;
+        mInclFiles = inclFiles;
+        mRecords = recList;
+    }
+        
+    /** Returns the basename of this file, i.e. the portion of mName
+     * after the last '/' (without the separator itself). */
+    String getName() {
+        int idx = mName.lastIndexOf('/');
+        // idx+1 skips past the separator; the previous substring(idx)
+        // incorrectly kept the leading '/' in the returned name, and
+        // "idx > 0" mishandled a name with a leading '/'.
+        return (idx >= 0) ? mName.substring(idx+1) : mName; 
+    }
+    
+    /** Generates record I/O code for the requested target language.
+     * @param language either "c++" or "java"
+     * @throws IOException if the underlying generator fails to write output
+     */
+    public void genCode(String language) throws IOException {
+        if ("c++".equals(language)) {
+            CppGenerator gen = new CppGenerator(mName, mInclFiles, mRecords);
+            gen.genCode();
+        } else if ("java".equals(language)) {
+            JavaGenerator gen = new JavaGenerator(mName, mInclFiles, mRecords);
+            gen.genCode();
+        } else {
+            System.out.println("Cannot recognize language:"+language);
+            System.exit(1);
+        }
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java Wed May  3 19:04:01 2006
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ *
+ * @author milindb
+ */
+public class JFloat extends JType {
+    
+    /** Creates a new instance of JFloat */
+    public JFloat() {
+        super("float", "float", "Float", "Float", "toFloat");
+    }
+    
+    public String getSignature() {
+        return "f";
+    }
+    
+    public String genJavaHashCode(String fname) {
+        return "    ret = Float.floatToIntBits("+fname+");\n";
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java Wed May  3 19:04:01 2006
@@ -0,0 +1,33 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ *
+ * @author milindb
+ */
+public class JInt extends JType {
+
+    /**
+     * Constructs the type descriptor for the DDL "int" type, mapping it
+     * to C++ int32_t and Java int (wrapper Integer, unwrap via toInt).
+     */
+    public JInt() {
+        super("int32_t", "int", "Int", "Integer", "toInt");
+    }
+
+    /** Returns the one-character type signature for int. */
+    public String getSignature() {
+        return "i";
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java Wed May  3 19:04:01 2006
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ *
+ * @author milindb
+ */
+public class JLong extends JType {
+    
+    /** Creates a new instance of JLong */
+    public JLong() {
+        super("int64_t", "long", "Long", "Long", "toLong");
+    }
+    
+    public String getSignature() {
+        return "l";
+    }
+    
+    public String genJavaHashCode(String fname) {
+        return "    ret = (int) ("+fname+"^("+fname+">>>32));\n";
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java Wed May  3 19:04:01 2006
@@ -0,0 +1,99 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ *
+ * @author milindb
+ */
+public class JMap extends JCompType {
+   
+    // Nesting depth of map code generation; appended to generated
+    // identifiers so nested maps get distinct variable names.
+    // NOTE(review): static mutable state — generation is single-threaded
+    // here, presumably; confirm before using the compiler concurrently.
+    static private int level = 0;
+    
+    /** Current nesting level as a string, for identifier suffixes. */
+    static private String getLevel() { return Integer.toString(level); }
+    
+    /** Enters one level of map nesting. */
+    static private void incrLevel() { level++; }
+    
+    /** Leaves one level of map nesting. */
+    static private void decrLevel() { level--; }
+    
+    /** Makes a level-unique generated identifier, e.g. "midx0". */
+    static private String getId(String id) { return id+getLevel(); }
+    
+    // Key and value type descriptors of this map type.
+    private JType mKey;
+    private JType mValue;
+    
+    /** Creates a new instance of JMap */
+    public JMap(JType t1, JType t2) {
+        super(" ::std::map<"+t1.getCppType()+","+t2.getCppType()+">",
+                "java.util.TreeMap", "Map", "java.util.TreeMap");
+        mKey = t1;
+        mValue = t2;
+    }
+    
+    /** Returns the map signature: key and value signatures in braces. */
+    public String getSignature() {
+        return "{" + mKey.getSignature() + mValue.getSignature() +"}";
+    }
+    
+    // Maps generate no comparison code (empty string).
+    public String genJavaCompareTo(String fname) {
+        return "";
+    }
+    
+    /** Emits Java code that reads a TreeMap from the input archive a_,
+     * optionally declaring the target variable first (decl). Recurses
+     * into the key/value types for the per-entry reads. */
+    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+        StringBuffer ret = new StringBuffer("");
+        if (decl) {
+            ret.append("    java.util.TreeMap "+fname+";\n");
+        }
+        ret.append("    {\n");
+        incrLevel();
+        ret.append("      org.apache.hadoop.record.Index "+getId("midx")+" = a_.startMap(\""+tag+"\");\n");
+        ret.append("      "+fname+"=new java.util.TreeMap();\n");
+        ret.append("      for (; !"+getId("midx")+".done(); "+getId("midx")+".incr()) {\n");
+        ret.append(mKey.genJavaReadWrapper(getId("k"),getId("k"),true));
+        ret.append(mValue.genJavaReadWrapper(getId("v"),getId("v"),true));
+        ret.append("        "+fname+".put("+getId("k")+","+getId("v")+");\n");
+        ret.append("      }\n");
+        ret.append("    a_.endMap(\""+tag+"\");\n");
+        decrLevel();
+        ret.append("    }\n");
+        return ret.toString();
+    }
+    
+    /** Read method for a map is just the wrapper without a declaration. */
+    public String genJavaReadMethod(String fname, String tag) {
+        return genJavaReadWrapper(fname, tag, false);
+    }
+    
+    /** Emits Java code that iterates the map's entry set and writes each
+     * key/value pair to the output archive a_, casting entries to the
+     * key/value wrapper types before delegating to their writers. */
+    public String genJavaWriteWrapper(String fname, String tag) {
+        StringBuffer ret = new StringBuffer("    {\n");
+        incrLevel();
+        ret.append("      a_.startMap("+fname+",\""+tag+"\");\n");
+        ret.append("      java.util.Set "+getId("es")+" = "+fname+".entrySet();\n");
+        ret.append("      for(java.util.Iterator "+getId("midx")+" = "+getId("es")+".iterator(); "+getId("midx")+".hasNext(); ) {\n");
+        ret.append("        java.util.Map.Entry "+getId("me")+" = (java.util.Map.Entry) "+getId("midx")+".next();\n");
+        ret.append("        "+mKey.getJavaWrapperType()+" "+getId("k")+" = ("+mKey.getJavaWrapperType()+") "+getId("me")+".getKey();\n");
+        ret.append("        "+mValue.getJavaWrapperType()+" "+getId("v")+" = ("+mValue.getJavaWrapperType()+") "+getId("me")+".getValue();\n");
+        ret.append(mKey.genJavaWriteWrapper(getId("k"),getId("k")));
+        ret.append(mValue.genJavaWriteWrapper(getId("v"),getId("v")));
+        ret.append("      }\n");
+        ret.append("      a_.endMap("+fname+",\""+tag+"\");\n");
+        ret.append("    }\n");
+        decrLevel();
+        return ret.toString();
+    }
+    
+    /** Write method for a map is identical to the wrapper. */
+    public String genJavaWriteMethod(String fname, String tag) {
+        return genJavaWriteWrapper(fname, tag);
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Wed May  3 19:04:01 2006
@@ -0,0 +1,384 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+
+/**
+ *
+ * @author milindb
+ */
+public class JRecord extends JCompType {
+
+    // Fully-qualified record name as written in the DDL, e.g. "a.b.Rec".
+    private String mFQName;
+    // Simple name, the part after the last '.'.
+    private String mName;
+    // Module/package prefix, the part before the last '.'.
+    private String mModule;
+    // List of JField objects declared in this record.
+    private ArrayList mFields;
+    
+    /**
+     * Creates a new instance of JRecord
+     */
+    public JRecord(String name, ArrayList flist) {
+        super(name.replaceAll("\\.","::"), name, "Record", name);
+        mFQName = name;
+        int idx = name.lastIndexOf('.');
+        // NOTE(review): assumes name contains at least one '.'; an
+        // unqualified name gives idx == -1 and substring(0, -1) throws.
+        mName = name.substring(idx+1);
+        mModule = name.substring(0, idx);
+        mFields = flist;
+    }
+    
+    /** Returns the simple (unqualified) record name. */
+    public String getName() {
+        return mName;
+    }
+    
+    /** Returns the fully-qualified Java name, e.g. "a.b.Rec". */
+    public String getJavaFQName() {
+        return mFQName;
+    }
+    
+    /** Returns the fully-qualified C++ name, dots replaced by "::". */
+    public String getCppFQName() {
+        return mFQName.replaceAll("\\.", "::");
+    }
+    
+    /** Returns the Java package for the generated class. */
+    public String getJavaPackage() {
+        return mModule;
+    }
+    
+    /** Returns the C++ namespace, dots replaced by "::". */
+    public String getCppNameSpace() {
+        return mModule.replaceAll("\\.", "::");
+    }
+    
+    /** Returns the record's field list (of JField). */
+    public ArrayList getFields() {
+        return mFields;
+    }
+    
+    /** Returns the record signature: "L" + name + "(" + field sigs + ")". */
+    public String getSignature() {
+        StringBuffer sb = new StringBuffer();
+        sb.append("L").append(mName).append("(");
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
+            String s = ((JField) i.next()).getSignature();
+            sb.append(s);
+        }
+        sb.append(")");
+        return sb.toString();
+    }
+    
+    /** Emits a C++ member declaration for a field of this record type. */
+    public String genCppDecl(String fname) {
+        return "  "+mName+" "+fname+";\n";
+    }
+    
+    /** Emits a Java member declaration for a field of this record type. */
+    public String genJavaDecl (String fname) {
+        return "  private "+mName+" "+fname+";\n";
+    }
+    
+    /** Emits Java code that constructs and reads a nested record field,
+     * optionally declaring the variable first (decl). */
+    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+        StringBuffer ret = new StringBuffer("");
+        if (decl) {
+            ret.append("    "+getJavaFQName()+" "+fname+";\n");
+        }
+        ret.append("    "+fname+"= new "+getJavaFQName()+"();\n");
+        ret.append("    a_.readRecord("+fname+",\""+tag+"\");\n");
+        return ret.toString();
+    }
+    
+    /** Emits Java code that writes a nested record field to the archive. */
+    public String genJavaWriteWrapper(String fname, String tag) {
+        return "    a_.writeRecord("+fname+",\""+tag+"\");\n";
+    }
+    
+    /** Writes the C++ class declaration for this record to hh and the
+     * member-function definitions (serialize/deserialize/validate/
+     * comparison operators/type/signature) to cc. */
+    public void genCppCode(FileWriter hh, FileWriter cc)
+        throws IOException {
+        String[] ns = getCppNameSpace().split("::");
+        // Open one namespace block per component of the module path.
+        for (int i = 0; i < ns.length; i++) {
+            hh.write("namespace "+ns[i]+" {\n");
+        }
+        
+        hh.write("class "+getName()+" : public ::hadoop::Record {\n");
+        hh.write("private:\n");
+        
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
+            JField jf = (JField) i.next();
+            hh.write(jf.genCppDecl());
+        }
+        // bs_ tracks which fields have been set, one bit per field.
+        hh.write("  std::bitset<"+mFields.size()+"> bs_;\n");
+        hh.write("public:\n");
+        hh.write("  virtual void serialize(::hadoop::OArchive& a_, const char* tag);\n");
+        hh.write("  virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
+        hh.write("  virtual const ::std::string& type() const;\n");
+        hh.write("  virtual const ::std::string& signature() const;\n");
+        hh.write("  virtual bool validate() const;\n");
+        hh.write("  virtual bool operator<(const "+getName()+"& peer_);\n");
+        hh.write("  virtual bool operator==(const "+getName()+"& peer_);\n");
+        hh.write("  virtual ~"+getName()+"() {};\n");
+        int fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            hh.write(jf.genCppGetSet(fIdx));
+        }
+        hh.write("}; // end record "+getName()+"\n");
+        for (int i=0; i<ns.length; i++) {
+            hh.write("} // end namespace "+ns[i]+"\n");
+        }
+        // serialize(): refuse to write unless all fields were set.
+        cc.write("void "+getCppFQName()+"::serialize(::hadoop::OArchive& a_, const char* tag) {\n");
+        cc.write("  if (!validate()) throw new ::hadoop::IOException(\"All fields not set.\");\n");
+        cc.write("  a_.startRecord(*this,tag);\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            String name = jf.getName();
+            // Buffers need an explicit length argument when serialized.
+            if (jf.getType() instanceof JBuffer) {
+                cc.write("  a_.serialize("+name+","+name+".length(),\""+jf.getTag()+"\");\n");
+            } else {
+                cc.write("  a_.serialize("+name+",\""+jf.getTag()+"\");\n");
+            }
+            cc.write("  bs_.reset("+fIdx+");\n");
+        }
+        cc.write("  a_.endRecord(*this,tag);\n");
+        cc.write("  return;\n");
+        cc.write("}\n");
+        
+        // deserialize(): read each field and mark its bit as set.
+        cc.write("void "+getCppFQName()+"::deserialize(::hadoop::IArchive& a_, const char* tag) {\n");
+        cc.write("  a_.startRecord(*this,tag);\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            String name = jf.getName();
+            if (jf.getType() instanceof JBuffer) {
+                cc.write("  { size_t len=0; a_.deserialize("+name+",len,\""+jf.getTag()+"\");}\n");
+            } else {
+                cc.write("  a_.deserialize("+name+",\""+jf.getTag()+"\");\n");
+            }
+            cc.write("  bs_.set("+fIdx+");\n");
+        }
+        cc.write("  a_.endRecord(*this,tag);\n");
+        cc.write("  return;\n");
+        cc.write("}\n");
+        
+        // validate(): all bits set, and nested records valid too.
+        cc.write("bool "+getCppFQName()+"::validate() const {\n");
+        cc.write("  if (bs_.size() != bs_.count()) return false;\n");
+        // NOTE(review): fIdx++ here is a leftover from the loops above;
+        // fIdx is not used inside this loop body.
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            JType type = jf.getType();
+            if (type instanceof JRecord) {
+                cc.write("  if (!"+jf.getName()+".validate()) return false;\n");
+            }
+        }
+        cc.write("  return true;\n");
+        cc.write("}\n");
+        
+        // operator<: conjunction of per-field '<' comparisons.
+        // NOTE(review): this is not a lexicographic comparison; confirm
+        // the intended ordering semantics for generated records.
+        cc.write("bool "+getCppFQName()+"::operator< (const "+getCppFQName()+"& peer_) {\n");
+        cc.write("  return (1\n");
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
+            JField jf = (JField) i.next();
+            String name = jf.getName();
+            cc.write("    && ("+name+" < peer_."+name+")\n");
+        }
+        cc.write("  );\n");
+        cc.write("}\n");
+        
+        // operator==: conjunction of per-field equality comparisons.
+        cc.write("bool "+getCppFQName()+"::operator== (const "+getCppFQName()+"& peer_) {\n");
+        cc.write("  return (1\n");
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
+            JField jf = (JField) i.next();
+            String name = jf.getName();
+            cc.write("    && ("+name+" == peer_."+name+")\n");
+        }
+        cc.write("  );\n");
+        cc.write("}\n");
+        
+        cc.write("const ::std::string&"+getCppFQName()+"::type() const {\n");
+        cc.write("  static const ::std::string type_(\""+mName+"\");\n");
+        cc.write("  return type_;\n");
+        cc.write("}\n");
+        
+        cc.write("const ::std::string&"+getCppFQName()+"::signature() const {\n");
+        cc.write("  static const ::std::string sig_(\""+getSignature()+"\");\n");
+        cc.write("  return sig_;\n");
+        cc.write("}\n");
+        
+    }
+    
+    /** Generates the Java source file for this record under the package
+     * directory (created relative to the current working directory),
+     * implementing Record and WritableComparable. */
+    public void genJavaCode() throws IOException {
+        String pkg = getJavaPackage();
+        String pkgpath = pkg.replaceAll("\\.", "/");
+        File pkgdir = new File(pkgpath);
+        if (!pkgdir.exists()) {
+            // create the pkg directory
+            boolean ret = pkgdir.mkdirs();
+            if (!ret) {
+                // NOTE(review): typo "Cannnot" in the emitted message.
+                System.out.println("Cannnot create directory: "+pkgpath);
+                System.exit(1);
+            }
+        } else if (!pkgdir.isDirectory()) {
+            // not a directory
+            System.out.println(pkgpath+" is not a directory.");
+            System.exit(1);
+        }
+        File jfile = new File(pkgdir, getName()+".java");
+        FileWriter jj = new FileWriter(jfile);
+        jj.write("// File generated by hadoop record compiler. Do not edit.\n");
+        jj.write("package "+getJavaPackage()+";\n\n");
+        jj.write("public class "+getName()+" implements org.apache.hadoop.record.Record, org.apache.hadoop.io.WritableComparable {\n");
+        for (Iterator i = mFields.iterator(); i.hasNext();) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaDecl());
+        }
+        // bs_ has one bit per field plus one sentinel bit that is always
+        // set, so bs_.length() equals mFields.size()+1 once all are set.
+        jj.write("  private java.util.BitSet bs_;\n");
+        jj.write("  public "+getName()+"() {\n");
+        jj.write("    bs_ = new java.util.BitSet("+(mFields.size()+1)+");\n");
+        jj.write("    bs_.set("+mFields.size()+");\n");
+        jj.write("  }\n");
+        
+        // All-fields constructor.
+        jj.write("  public "+getName()+"(\n");
+        int fIdx = 0;
+        int fLen = mFields.size();
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaConstructorParam(fIdx));
+            jj.write((fLen-1 == fIdx)?"":",\n");
+        }
+        jj.write(") {\n");
+        jj.write("    bs_ = new java.util.BitSet("+(mFields.size()+1)+");\n");
+        jj.write("    bs_.set("+mFields.size()+");\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaConstructorSet(fIdx));
+        }
+        jj.write("  }\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaGetSet(fIdx));
+        }
+        // serialize(): validate, then write each field and clear its bit.
+        jj.write("  public void serialize(org.apache.hadoop.record.OutputArchive a_, String tag) throws java.io.IOException {\n");
+        jj.write("    if (!validate()) throw new java.io.IOException(\"All fields not set:\");\n");
+        jj.write("    a_.startRecord(this,tag);\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaWriteMethodName());
+            jj.write("    bs_.clear("+fIdx+");\n");
+        }
+        jj.write("    a_.endRecord(this,tag);\n");
+        jj.write("  }\n");
+        
+        // deserialize(): read each field and mark its bit as set.
+        jj.write("  public void deserialize(org.apache.hadoop.record.InputArchive a_, String tag) throws java.io.IOException {\n");
+        jj.write("    a_.startRecord(tag);\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaReadMethodName());
+            jj.write("    bs_.set("+fIdx+");\n");
+        }
+        jj.write("    a_.endRecord(tag);\n");
+        jj.write("}\n");
+        
+        // toString(): CSV-serialize into a byte stream, decode as UTF-8.
+        jj.write("  public String toString() {\n");
+        jj.write("    try {\n");
+        jj.write("      java.io.ByteArrayOutputStream s =\n");
+        jj.write("        new java.io.ByteArrayOutputStream();\n");
+        jj.write("      org.apache.hadoop.record.CsvOutputArchive a_ = \n");
+        jj.write("        new org.apache.hadoop.record.CsvOutputArchive(s);\n");
+        jj.write("      a_.startRecord(this,\"\");\n");
+        fIdx = 0;
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaWriteMethodName());
+        }
+        jj.write("      a_.endRecord(this,\"\");\n");
+        jj.write("      return new String(s.toByteArray(), \"UTF-8\");\n");
+        jj.write("    } catch (Throwable ex) {\n");
+        jj.write("      ex.printStackTrace();\n");
+        jj.write("    }\n");
+        jj.write("    return \"ERROR\";\n");
+        jj.write("  }\n");
+        
+        // Writable.write(): binary-serialize to the DataOutput.
+        jj.write("  public void write(java.io.DataOutput out) throws java.io.IOException {\n");
+        jj.write("    org.apache.hadoop.record.BinaryOutputArchive archive = new org.apache.hadoop.record.BinaryOutputArchive(out);\n");
+        jj.write("    serialize(archive, \"\");\n");
+        jj.write("  }\n");
+        
+        // Writable.readFields(): binary-deserialize from the DataInput.
+        jj.write("  public void readFields(java.io.DataInput in) throws java.io.IOException {\n");
+        jj.write("    org.apache.hadoop.record.BinaryInputArchive archive = new org.apache.hadoop.record.BinaryInputArchive(in);\n");
+        jj.write("    deserialize(archive, \"\");\n");
+        jj.write("  }\n");
+        
+        // validate(): all bits set and nested records valid.
+        jj.write("  private boolean validate() {\n");
+        jj.write("    if (bs_.cardinality() != bs_.length()) return false;\n");
+        // NOTE(review): fIdx++ in the remaining loops is a leftover from
+        // the indexed loops above; fIdx is unused in these bodies.
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            JType type = jf.getType();
+            if (type instanceof JRecord) {
+                jj.write("    if (!"+jf.getName()+".validate()) return false;\n");
+            }
+        }
+        jj.write("    return true;\n");
+        jj.write("}\n");
+        
+        // compareTo(): field-by-field, stopping at the first difference.
+        jj.write("  public int compareTo (Object peer_) throws ClassCastException {\n");
+        jj.write("    if (!(peer_ instanceof "+getName()+")) {\n");
+        jj.write("      throw new ClassCastException(\"Comparing different types of records.\");\n");
+        jj.write("    }\n");
+        jj.write("    "+getName()+" peer = ("+getName()+") peer_;\n");
+        jj.write("    int ret = 0;\n");
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaCompareTo());
+            jj.write("    if (ret != 0) return ret;\n");
+        }
+        jj.write("     return ret;\n");
+        jj.write("  }\n");
+        
+        // equals(): type check, identity shortcut, then per-field equality.
+        jj.write("  public boolean equals(Object peer_) {\n");
+        jj.write("    if (!(peer_ instanceof "+getName()+")) {\n");
+        jj.write("      return false;\n");
+        jj.write("    }\n");
+        jj.write("    if (peer_ == this) {\n");
+        jj.write("      return true;\n");
+        jj.write("    }\n");
+        jj.write("    "+getName()+" peer = ("+getName()+") peer_;\n");
+        jj.write("    boolean ret = false;\n");
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaEquals());
+            jj.write("    if (!ret) return ret;\n");
+        }
+        jj.write("     return ret;\n");
+        jj.write("  }\n");
+        
+        // hashCode(): classic 17/37 accumulation over field hashes.
+        jj.write("  public int hashCode() {\n");
+        jj.write("    int result = 17;\n");
+        jj.write("    int ret;\n");
+        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
+            JField jf = (JField) i.next();
+            jj.write(jf.genJavaHashCode());
+            jj.write("    result = 37*result + ret;\n");
+        }
+        jj.write("    return result;\n");
+        jj.write("  }\n");
+        jj.write("  public static String signature() {\n");
+        jj.write("    return \""+getSignature()+"\";\n");
+        jj.write("  }\n");
+        
+        jj.write("}\n");
+        
+        jj.close();
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Wed May  3 19:04:01 2006
@@ -0,0 +1,47 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ *
+ * @author milindb
+ */
+public class JString extends JCompType {
+    
+    /**
+     * Constructs the type descriptor for the DDL string type, mapping it
+     * to C++ ::std::string and Java String.
+     */
+    public JString() {
+        super(" ::std::string", "String", "String", "String");
+    }
+    
+    /** Returns the one-character type signature for string. */
+    public String getSignature() {
+        return "s";
+    }
+    
+    /** Emits Java code reading a String field from the input archive a_,
+     * optionally preceded by a variable declaration (decl). */
+    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+        StringBuffer code = new StringBuffer();
+        if (decl) {
+            code.append("    String ").append(fname).append(";\n");
+        }
+        code.append("        ").append(fname).append("=a_.readString(\"");
+        code.append(tag).append("\");\n");
+        return code.toString();
+    }
+    
+    /** Emits Java code writing a String field to the output archive a_. */
+    public String genJavaWriteWrapper(String fname, String tag) {
+        return "        a_.writeString(" + fname + ",\"" + tag + "\");\n";
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java Wed May  3 19:04:01 2006
@@ -0,0 +1,127 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ * Abstract base class for every type that can appear in the record DDL.
+ * Holds the C++ and Java spellings of the type and emits the shared
+ * boilerplate (field declarations, accessors, archive read/write calls)
+ * used by the generated record classes.
+ *
+ * @author milindb
+ */
+public abstract class JType {
+    
+    private String mCppName;       // type name emitted into generated C++
+    private String mJavaName;      // type name emitted into generated Java
+    private String mMethodSuffix;  // suffix of the archive read/write methods
+    private String mWrapper;       // Java object (wrapper) type used inside collections
+    private String mUnwrapMethod;  // wrapper method that recovers the plain value
+    
+    /**
+     * Creates a new instance of JType.
+     */
+    public JType(String cppname, String javaname, String suffix, String wrapper, String unwrap) {
+        mCppName = cppname;
+        mJavaName = javaname;
+        mMethodSuffix = suffix;
+        mWrapper = wrapper;
+        mUnwrapMethod = unwrap;
+    }
+    
+    /** Returns this type's code in the record signature string. */
+    abstract public String getSignature();
+    
+    /** Emits the C++ member declaration for field fname. */
+    public String genCppDecl(String fname) {
+        return "  " + mCppName + " m" + fname + ";\n";
+    }
+    
+    /** Emits the private Java member declaration for field fname. */
+    public String genJavaDecl(String fname) {
+        return "  private " + mJavaName + " m" + fname + ";\n";
+    }
+    
+    /** Emits a constructor parameter for the field at position fIdx. */
+    public String genJavaConstructorParam(int fIdx) {
+        return "        " + mJavaName + " m" + fIdx;
+    }
+    
+    /** Emits the C++ getter/setter pair; the setter also marks bit fIdx in bs_. */
+    public String genCppGetSet(String fname, int fIdx) {
+        StringBuffer code = new StringBuffer();
+        code.append("  virtual "+mCppName+" get"+fname+"() const {\n");
+        code.append("    return m"+fname+";\n");
+        code.append("  }\n");
+        code.append("  virtual void set"+fname+"("+mCppName+" m_) {\n");
+        code.append("    m"+fname+"=m_; bs_.set("+fIdx+");\n");
+        code.append("  }\n");
+        return code.toString();
+    }
+    
+    /** Emits the Java getter/setter pair; the setter also marks bit fIdx in bs_. */
+    public String genJavaGetSet(String fname, int fIdx) {
+        StringBuffer code = new StringBuffer();
+        code.append("  public "+mJavaName+" get"+fname+"() {\n");
+        code.append("    return m"+fname+";\n");
+        code.append("  }\n");
+        code.append("  public void set"+fname+"("+mJavaName+" m_) {\n");
+        code.append("    m"+fname+"=m_; bs_.set("+fIdx+");\n");
+        code.append("  }\n");
+        return code.toString();
+    }
+    
+    /** Returns the C++ spelling of this type. */
+    public String getCppType() {
+        return mCppName;
+    }
+    
+    /** Returns the Java spelling of this type. */
+    public String getJavaType() {
+        return mJavaName;
+    }
+   
+    /** Returns the Java wrapper (object) type. */
+    public String getJavaWrapperType() {
+        return mWrapper;
+    }
+    
+    /** Returns the archive read/write method suffix for this type. */
+    public String getMethodSuffix() {
+        return mMethodSuffix;
+    }
+    
+    /** Emits the Java statement writing field fname to archive a_. */
+    public String genJavaWriteMethod(String fname, String tag) {
+        return "    a_.write" + mMethodSuffix + "(" + fname + ",\"" + tag + "\");\n";
+    }
+    
+    /** Emits the Java statement reading field fname from archive a_. */
+    public String genJavaReadMethod(String fname, String tag) {
+        return "    " + fname + "=a_.read" + mMethodSuffix + "(\"" + tag + "\");\n";
+    }
+    
+    /**
+     * Emits Java code reading this field into its wrapper type; when decl is
+     * true the local declaration is emitted first.
+     */
+    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+        String prefix = decl ? "    " + mWrapper + " " + fname + ";\n" : "";
+        return prefix + "    " + fname + "=new " + mWrapper + "(a_.read" + mMethodSuffix + "(\"" + tag + "\"));\n";
+    }
+    
+    /** Emits Java code unwrapping field fname and writing it to archive a_. */
+    public String genJavaWriteWrapper(String fname, String tag) {
+        return "        a_.write" + mMethodSuffix + "(" + fname + "." + mUnwrapMethod + "(),\"" + tag + "\");\n";
+    }
+    
+    /** Emits a three-way comparison of fname against peer's field into ret. */
+    public String genJavaCompareTo(String fname) {
+        return "    ret = (" + fname + " == peer." + fname + ")? 0 :((" + fname + "<peer." + fname + ")?-1:1);\n";
+    }
+    
+    /** Emits an equality test of fname against peer into ret. */
+    public String genJavaEquals(String fname, String peer) {
+        return "    ret = (" + fname + "==" + peer + ");\n";
+    }
+    
+    /** Emits a hash-code expression (int cast of the field) into ret. */
+    public String genJavaHashCode(String fname) {
+        return "    ret = (int)" + fname + ";\n";
+    }
+
+    /** Emits the constructor assignment for field fname at position fIdx. */
+    String genJavaConstructorSet(String fname, int fIdx) {
+        return "    m" + fname + "=m" + fIdx + "; bs_.set(" + fIdx + ");\n";
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java Wed May  3 19:04:01 2006
@@ -0,0 +1,92 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+/**
+ * Code generator type for the DDL "vector" type: ::std::vector&lt;T&gt; in C++
+ * and java.util.ArrayList in Java.
+ *
+ * @author milindb
+ */
+public class JVector extends JCompType {
+    
+    // Nesting depth of vector code generation; it suffixes the temporaries
+    // of nested loops (e0/e1, vidx0/vidx1, ...) so they do not collide.
+    static private int level = 0;
+    
+    static private String getId(String id) {
+        return id + getLevel();
+    }
+    
+    static private String getLevel() {
+        return Integer.toString(level);
+    }
+    
+    static private void incrLevel() {
+        level++;
+    }
+    
+    static private void decrLevel() {
+        level--;
+    }
+    
+    private JType mElement; // element type of the vector
+    
+    /** Creates a new instance of JVector over element type t. */
+    public JVector(JType t) {
+        super(" ::std::vector<"+t.getCppType()+">", "java.util.ArrayList", "Vector", "java.util.ArrayList");
+        mElement = t;
+    }
+    
+    /** Signature is the element signature in square brackets. */
+    public String getSignature() {
+        return "[" + mElement.getSignature() + "]";
+    }
+    
+    /** Vectors emit no compareTo code. */
+    public String genJavaCompareTo(String fname) {
+        return "";
+    }
+    
+    /**
+     * Emits Java code that reads the whole vector from archive a_; when decl
+     * is true the ArrayList declaration is emitted first.
+     */
+    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
+        StringBuffer code = new StringBuffer();
+        if (decl) {
+            code.append("      java.util.ArrayList "+fname+";\n");
+        }
+        code.append("    {\n");
+        incrLevel();
+        // Capture the per-level temporaries once; this must happen inside
+        // the incrLevel/decrLevel window so nested vectors get fresh names.
+        String vidx = getId("vidx");
+        String elem = getId("e");
+        code.append("      org.apache.hadoop.record.Index "+vidx+" = a_.startVector(\""+tag+"\");\n");
+        code.append("      "+fname+"=new java.util.ArrayList();\n");
+        code.append("      for (; !"+vidx+".done(); "+vidx+".incr()) {\n");
+        code.append(mElement.genJavaReadWrapper(elem, elem, true));
+        code.append("        "+fname+".add("+elem+");\n");
+        code.append("      }\n");
+        code.append("    a_.endVector(\""+tag+"\");\n");
+        decrLevel();
+        code.append("    }\n");
+        return code.toString();
+    }
+    
+    /** Reading a vector field is the same as reading its wrapper. */
+    public String genJavaReadMethod(String fname, String tag) {
+        return genJavaReadWrapper(fname, tag, false);
+    }
+    
+    /** Emits Java code that writes the whole vector to archive a_. */
+    public String genJavaWriteWrapper(String fname, String tag) {
+        StringBuffer code = new StringBuffer("    {\n");
+        incrLevel();
+        String len = getId("len");
+        String vidx = getId("vidx");
+        String elem = getId("e");
+        String wrapper = mElement.getJavaWrapperType();
+        code.append("      a_.startVector("+fname+",\""+tag+"\");\n");
+        code.append("      int "+len+" = "+fname+".size();\n");
+        code.append("      for(int "+vidx+" = 0; "+vidx+"<"+len+"; "+vidx+"++) {\n");
+        code.append("        "+wrapper+" "+elem+" = ("+wrapper+") "+fname+".get("+vidx+");\n");
+        code.append(mElement.genJavaWriteWrapper(elem, elem));
+        code.append("      }\n");
+        code.append("      a_.endVector("+fname+",\""+tag+"\");\n");
+        code.append("    }\n");
+        decrLevel();
+        return code.toString();
+    }
+    
+    /** Writing a vector field is the same as writing its wrapper. */
+    public String genJavaWriteMethod(String fname, String tag) {
+        return genJavaWriteWrapper(fname, tag);
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java Wed May  3 19:04:01 2006
@@ -0,0 +1,47 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler;
+
+import java.util.ArrayList;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ *
+ * @author milindb
+ */
+class JavaGenerator {
+    private String mName;
+    private ArrayList mInclFiles;
+    private ArrayList mRecList;
+    
+    /** Creates a new instance of JavaGenerator */
+    JavaGenerator(String name, ArrayList incl, ArrayList records) {
+        mName = name;
+        mInclFiles = incl;
+        mRecList = records;
+    }
+    
+    public void genCode() throws IOException {
+        for (Iterator i = mRecList.iterator(); i.hasNext(); ) {
+            JRecord rec = (JRecord) i.next();
+            rec.genJavaCode();
+        }
+    }
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/ParseException.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/ParseException.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/ParseException.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/ParseException.java Wed May  3 19:04:01 2006
@@ -0,0 +1,208 @@
+/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+/**
+ * This exception is thrown when parse errors are encountered.
+ * You can explicitly create objects of this exception type by
+ * calling the method generateParseException in the generated
+ * parser.
+ *
+ * You can modify this class to customize your error reporting
+ * mechanisms so long as you retain the public fields.
+ */
+public class ParseException extends Exception {
+
+  /**
+   * This constructor is used by the method "generateParseException"
+   * in the generated parser.  Calling this constructor generates
+   * a new object of this type with the fields "currentToken",
+   * "expectedTokenSequences", and "tokenImage" set.  The boolean
+   * flag "specialConstructor" is also set to true to indicate that
+   * this constructor was used to create this object.
+   * This constructor calls its super class with the empty string
+   * to force the "toString" method of parent class "Throwable" to
+   * print the error message in the form:
+   *     ParseException: <result of getMessage>
+   */
+  public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        String[] tokenImageVal
+                       )
+  {
+    super("");
+    specialConstructor = true;
+    currentToken = currentTokenVal;
+    expectedTokenSequences = expectedTokenSequencesVal;
+    tokenImage = tokenImageVal;
+  }
+
+  /**
+   * The following constructors are for use by you for whatever
+   * purpose you can think of.  Constructing the exception in this
+   * manner makes the exception behave in the normal way - i.e., as
+   * documented in the class "Throwable".  The fields "errorToken",
+   * "expectedTokenSequences", and "tokenImage" do not contain
+   * relevant information.  The JavaCC generated code does not use
+   * these constructors.
+   */
+
+  public ParseException() {
+    super();
+    specialConstructor = false;
+  }
+
+  public ParseException(String message) {
+    super(message);
+    specialConstructor = false;
+  }
+
+  /**
+   * This variable determines which constructor was used to create
+   * this object and thereby affects the semantics of the
+   * "getMessage" method (see below).
+   */
+  protected boolean specialConstructor;
+
+  /**
+   * This is the last token that has been consumed successfully.  If
+   * this object has been created due to a parse error, the token
+   * following this token will (therefore) be the first error token.
+   */
+  public Token currentToken;
+
+  /**
+   * Each entry in this array is an array of integers.  Each array
+   * of integers represents a sequence of tokens (by their ordinal
+   * values) that is expected at this point of the parse.
+   */
+  public int[][] expectedTokenSequences;
+
+  /**
+   * This is a reference to the "tokenImage" array of the generated
+   * parser within which the parse error occurred.  This array is
+   * defined in the generated ...Constants interface.
+   */
+  public String[] tokenImage;
+
+  /**
+   * This method has the standard behavior when this object has been
+   * created using the standard constructors.  Otherwise, it uses
+   * "currentToken" and "expectedTokenSequences" to generate a parse
+   * error message and returns it.  If this object has been created
+   * due to a parse error, and you do not catch it (it gets thrown
+   * from the parser), then this method is called during the printing
+   * of the final stack trace, and hence the correct error message
+   * gets displayed.
+   */
+  public String getMessage() {
+    if (!specialConstructor) {
+      return super.getMessage();
+    }
+    StringBuffer expected = new StringBuffer();
+    int maxSize = 0;
+    for (int i = 0; i < expectedTokenSequences.length; i++) {
+      if (maxSize < expectedTokenSequences[i].length) {
+        maxSize = expectedTokenSequences[i].length;
+      }
+      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
+        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
+      }
+      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
+        expected.append("...");
+      }
+      expected.append(eol).append("    ");
+    }
+    String retval = "Encountered \"";
+    Token tok = currentToken.next;
+    for (int i = 0; i < maxSize; i++) {
+      if (i != 0) retval += " ";
+      if (tok.kind == 0) {
+        retval += tokenImage[0];
+        break;
+      }
+      retval += add_escapes(tok.image);
+      tok = tok.next; 
+    }
+    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+    retval += "." + eol;
+    if (expectedTokenSequences.length == 1) {
+      retval += "Was expecting:" + eol + "    ";
+    } else {
+      retval += "Was expecting one of:" + eol + "    ";
+    }
+    retval += expected.toString();
+    return retval;
+  }
+
+  /**
+   * The end of line string for this machine.
+   */
+  protected String eol = System.getProperty("line.separator", "\n");
+ 
+  /**
+   * Used to convert raw characters to their escaped version
+   * when these raw versions cannot be used as part of an ASCII
+   * string literal.
+   */
+  protected String add_escapes(String str) {
+      StringBuffer retval = new StringBuffer();
+      char ch;
+      for (int i = 0; i < str.length(); i++) {
+        switch (str.charAt(i))
+        {
+           case 0 :
+              continue;
+           case '\b':
+              retval.append("\\b");
+              continue;
+           case '\t':
+              retval.append("\\t");
+              continue;
+           case '\n':
+              retval.append("\\n");
+              continue;
+           case '\f':
+              retval.append("\\f");
+              continue;
+           case '\r':
+              retval.append("\\r");
+              continue;
+           case '\"':
+              retval.append("\\\"");
+              continue;
+           case '\'':
+              retval.append("\\\'");
+              continue;
+           case '\\':
+              retval.append("\\\\");
+              continue;
+           default:
+              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+                 String s = "0000" + Integer.toString(ch, 16);
+                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+              } else {
+                 retval.append(ch);
+              }
+              continue;
+        }
+      }
+      return retval.toString();
+   }
+
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java Wed May  3 19:04:01 2006
@@ -0,0 +1,517 @@
+/* Generated By:JavaCC: Do not edit this line. Rcc.java */
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+import org.apache.hadoop.record.compiler.*;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * Recursive-descent parser for the Hadoop record DDL, generated by JavaCC
+ * (see the "Generated By" header above).  Regenerate from the grammar
+ * rather than editing by hand.
+ */
+public class Rcc implements RccConstants {
+    // Parse state shared between main() and the Include() production.
+    private static String language = "java";
+    private static ArrayList recFiles = new ArrayList();
+    private static JFile curFile;
+    private static Hashtable recTab;
+    private static String curDir = System.getProperty("user.dir");
+    private static String curFileName;
+    private static String curModuleName;
+
+    /**
+     * Command-line driver.  "-l"/"--language" selects the target language
+     * ("java" or "c++"); every other argument is a record file resolved
+     * against the current directory, parsed, and handed to genCode().
+     * Exits the VM with status 1 on any error.
+     */
+    public static void main(String args[]) {
+        for (int i=0; i<args.length; i++) {
+            if ("-l".equalsIgnoreCase(args[i]) ||
+                "--language".equalsIgnoreCase(args[i])) {
+                language = args[i+1].toLowerCase();
+                i++;
+            } else {
+                recFiles.add(args[i]);
+            }
+        }
+        if (!"c++".equals(language) && !"java".equals(language)) {
+            System.out.println("Cannot recognize language:" + language);
+            System.exit(1);
+        }
+        if (recFiles.size() == 0) {
+            System.out.println("No record files specified. Exiting.");
+            System.exit(1);
+        }
+        for (int i=0; i<recFiles.size(); i++) {
+            curFileName = (String) recFiles.get(i);
+            File file = new File(curDir, curFileName);
+            try {
+                FileReader reader = new FileReader(file);
+                Rcc parser = new Rcc(reader);
+                try {
+                    recTab = new Hashtable();
+                    curFile = parser.Input();
+                    System.out.println((String) recFiles.get(i) +
+                        " Parsed Successfully");
+                } catch (ParseException e) {
+                    System.out.println(e.toString());
+                    System.exit(1);
+                }
+                try {
+                    reader.close();
+                } catch (IOException e) {
+                }
+            } catch (FileNotFoundException e) {
+                System.out.println("File " + (String) recFiles.get(i) +
+                    " Not found.");
+                System.exit(1);
+            }
+            try {
+                curFile.genCode(language);
+            } catch (IOException e) {
+                System.out.println(e.toString());
+                System.exit(1);
+            }
+        }
+    }
+
+  // ----- Grammar productions: each parses one DDL construct and returns
+  // ----- its compiler-side representation (JFile/JRecord/JField/JType).
+  final public JFile Input() throws ParseException {
+    ArrayList ilist = new ArrayList();
+    ArrayList rlist = new ArrayList();
+    JFile i;
+    ArrayList l;
+    label_1:
+    while (true) {
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case INCLUDE_TKN:
+        i = Include();
+          ilist.add(i);
+        break;
+      case MODULE_TKN:
+        l = Module();
+          rlist.addAll(l);
+        break;
+      default:
+        jj_la1[0] = jj_gen;
+        jj_consume_token(-1);
+        throw new ParseException();
+      }
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case MODULE_TKN:
+      case INCLUDE_TKN:
+        ;
+        break;
+      default:
+        jj_la1[1] = jj_gen;
+        break label_1;
+      }
+    }
+    jj_consume_token(0);
+      {if (true) return new JFile(curFileName, ilist, rlist);}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JFile Include() throws ParseException {
+    String fname;
+    Token t;
+    jj_consume_token(INCLUDE_TKN);
+    t = jj_consume_token(CSTRING_TKN);
+        JFile ret = null;
+        fname = t.image.replaceAll("^\"", "").replaceAll("\"$","");
+        File file = new File(curDir, fname);
+        String tmpDir = curDir;
+        String tmpFile = curFileName;
+        curDir = file.getParent();
+        curFileName = file.getName();
+        try {
+            FileReader reader = new FileReader(file);
+            Rcc parser = new Rcc(reader);
+            try {
+                ret = parser.Input();
+                System.out.println(fname + " Parsed Successfully");
+            } catch (ParseException e) {
+                System.out.println(e.toString());
+                System.exit(1);
+            }
+            try {
+                reader.close();
+            } catch (IOException e) {
+            }
+        } catch (FileNotFoundException e) {
+            System.out.println("File " + fname +
+                " Not found.");
+            System.exit(1);
+        }
+        curDir = tmpDir;
+        curFileName = tmpFile;
+        {if (true) return ret;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public ArrayList Module() throws ParseException {
+    String mName;
+    ArrayList rlist;
+    jj_consume_token(MODULE_TKN);
+    mName = ModuleName();
+      curModuleName = mName;
+    jj_consume_token(LBRACE_TKN);
+    rlist = RecordList();
+    jj_consume_token(RBRACE_TKN);
+      {if (true) return rlist;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public String ModuleName() throws ParseException {
+    String name = "";
+    Token t;
+    t = jj_consume_token(IDENT_TKN);
+      name += t.image;
+    label_2:
+    while (true) {
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case DOT_TKN:
+        ;
+        break;
+      default:
+        jj_la1[2] = jj_gen;
+        break label_2;
+      }
+      jj_consume_token(DOT_TKN);
+      t = jj_consume_token(IDENT_TKN);
+          name += "." + t.image;
+    }
+      {if (true) return name;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public ArrayList RecordList() throws ParseException {
+    ArrayList rlist = new ArrayList();
+    JRecord r;
+    label_3:
+    while (true) {
+      r = Record();
+          rlist.add(r);
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case RECORD_TKN:
+        ;
+        break;
+      default:
+        jj_la1[3] = jj_gen;
+        break label_3;
+      }
+    }
+      {if (true) return rlist;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JRecord Record() throws ParseException {
+    String rname;
+    ArrayList flist = new ArrayList();
+    Token t;
+    JField f;
+    jj_consume_token(RECORD_TKN);
+    t = jj_consume_token(IDENT_TKN);
+      rname = t.image;
+    jj_consume_token(LBRACE_TKN);
+    label_4:
+    while (true) {
+      f = Field();
+          flist.add(f);
+      jj_consume_token(SEMICOLON_TKN);
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case BYTE_TKN:
+      case BOOLEAN_TKN:
+      case INT_TKN:
+      case LONG_TKN:
+      case FLOAT_TKN:
+      case DOUBLE_TKN:
+      case USTRING_TKN:
+      case BUFFER_TKN:
+      case VECTOR_TKN:
+      case MAP_TKN:
+      case IDENT_TKN:
+        ;
+        break;
+      default:
+        jj_la1[4] = jj_gen;
+        break label_4;
+      }
+    }
+    jj_consume_token(RBRACE_TKN);
+        String fqn = curModuleName + "." + rname;
+        JRecord r = new JRecord(fqn, flist);
+        recTab.put(fqn, r);
+        {if (true) return r;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JField Field() throws ParseException {
+    JType jt;
+    Token t;
+    jt = Type();
+    t = jj_consume_token(IDENT_TKN);
+      {if (true) return new JField(jt, t.image);}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JType Type() throws ParseException {
+    JType jt;
+    Token t;
+    String rname;
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case MAP_TKN:
+      jt = Map();
+      {if (true) return jt;}
+      break;
+    case VECTOR_TKN:
+      jt = Vector();
+      {if (true) return jt;}
+      break;
+    case BYTE_TKN:
+      jj_consume_token(BYTE_TKN);
+      {if (true) return new JByte();}
+      break;
+    case BOOLEAN_TKN:
+      jj_consume_token(BOOLEAN_TKN);
+      {if (true) return new JBoolean();}
+      break;
+    case INT_TKN:
+      jj_consume_token(INT_TKN);
+      {if (true) return new JInt();}
+      break;
+    case LONG_TKN:
+      jj_consume_token(LONG_TKN);
+      {if (true) return new JLong();}
+      break;
+    case FLOAT_TKN:
+      jj_consume_token(FLOAT_TKN);
+      {if (true) return new JFloat();}
+      break;
+    case DOUBLE_TKN:
+      jj_consume_token(DOUBLE_TKN);
+      {if (true) return new JDouble();}
+      break;
+    case USTRING_TKN:
+      jj_consume_token(USTRING_TKN);
+      {if (true) return new JString();}
+      break;
+    case BUFFER_TKN:
+      jj_consume_token(BUFFER_TKN);
+      {if (true) return new JBuffer();}
+      break;
+    case IDENT_TKN:
+      rname = ModuleName();
+        if (rname.indexOf('.', 0) < 0) {
+            rname = curModuleName + "." + rname;
+        }
+        JRecord r = (JRecord) recTab.get(rname);
+        if (r == null) {
+            System.out.println("Type " + rname + " not known. Exiting.");
+            System.exit(1);
+        }
+        {if (true) return r;}
+      break;
+    default:
+      jj_la1[5] = jj_gen;
+      jj_consume_token(-1);
+      throw new ParseException();
+    }
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JMap Map() throws ParseException {
+    JType jt1;
+    JType jt2;
+    jj_consume_token(MAP_TKN);
+    jj_consume_token(LT_TKN);
+    jt1 = Type();
+    jj_consume_token(COMMA_TKN);
+    jt2 = Type();
+    jj_consume_token(GT_TKN);
+      {if (true) return new JMap(jt1, jt2);}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public JVector Vector() throws ParseException {
+    JType jt;
+    jj_consume_token(VECTOR_TKN);
+    jj_consume_token(LT_TKN);
+    jt = Type();
+    jj_consume_token(GT_TKN);
+      {if (true) return new JVector(jt);}
+    throw new Error("Missing return statement in function");
+  }
+
+  // ----- JavaCC parser machinery below: token-stream plumbing and the
+  // ----- jj_la1 bookkeeping used by generateParseException().
+  public RccTokenManager token_source;
+  SimpleCharStream jj_input_stream;
+  public Token token, jj_nt;
+  private int jj_ntk;
+  private int jj_gen;
+  final private int[] jj_la1 = new int[6];
+  static private int[] jj_la1_0;
+  static private int[] jj_la1_1;
+  static {
+      jj_la1_0();
+      jj_la1_1();
+   }
+   private static void jj_la1_0() {
+      jj_la1_0 = new int[] {0x2800,0x2800,0x40000000,0x1000,0xffc000,0xffc000,};
+   }
+   private static void jj_la1_1() {
+      jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x1,0x1,};
+   }
+
+  public Rcc(java.io.InputStream stream) {
+     this(stream, null);
+  }
+  public Rcc(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source = new RccTokenManager(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(java.io.InputStream stream) {
+     ReInit(stream, null);
+  }
+  public void ReInit(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  public Rcc(java.io.Reader stream) {
+    jj_input_stream = new SimpleCharStream(stream, 1, 1);
+    token_source = new RccTokenManager(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(java.io.Reader stream) {
+    jj_input_stream.ReInit(stream, 1, 1);
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  public Rcc(RccTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(RccTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 6; i++) jj_la1[i] = -1;
+  }
+
+  final private Token jj_consume_token(int kind) throws ParseException {
+    Token oldToken;
+    if ((oldToken = token).next != null) token = token.next;
+    else token = token.next = token_source.getNextToken();
+    jj_ntk = -1;
+    if (token.kind == kind) {
+      jj_gen++;
+      return token;
+    }
+    token = oldToken;
+    jj_kind = kind;
+    throw generateParseException();
+  }
+
+  final public Token getNextToken() {
+    if (token.next != null) token = token.next;
+    else token = token.next = token_source.getNextToken();
+    jj_ntk = -1;
+    jj_gen++;
+    return token;
+  }
+
+  final public Token getToken(int index) {
+    Token t = token;
+    for (int i = 0; i < index; i++) {
+      if (t.next != null) t = t.next;
+      else t = t.next = token_source.getNextToken();
+    }
+    return t;
+  }
+
+  final private int jj_ntk() {
+    if ((jj_nt=token.next) == null)
+      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
+    else
+      return (jj_ntk = jj_nt.kind);
+  }
+
+  private java.util.Vector jj_expentries = new java.util.Vector();
+  private int[] jj_expentry;
+  private int jj_kind = -1;
+
+  public ParseException generateParseException() {
+    jj_expentries.removeAllElements();
+    boolean[] la1tokens = new boolean[33];
+    for (int i = 0; i < 33; i++) {
+      la1tokens[i] = false;
+    }
+    if (jj_kind >= 0) {
+      la1tokens[jj_kind] = true;
+      jj_kind = -1;
+    }
+    for (int i = 0; i < 6; i++) {
+      if (jj_la1[i] == jj_gen) {
+        for (int j = 0; j < 32; j++) {
+          if ((jj_la1_0[i] & (1<<j)) != 0) {
+            la1tokens[j] = true;
+          }
+          if ((jj_la1_1[i] & (1<<j)) != 0) {
+            la1tokens[32+j] = true;
+          }
+        }
+      }
+    }
+    for (int i = 0; i < 33; i++) {
+      if (la1tokens[i]) {
+        jj_expentry = new int[1];
+        jj_expentry[0] = i;
+        jj_expentries.addElement(jj_expentry);
+      }
+    }
+    int[][] exptokseq = new int[jj_expentries.size()][];
+    for (int i = 0; i < jj_expentries.size(); i++) {
+      exptokseq[i] = (int[])jj_expentries.elementAt(i);
+    }
+    return new ParseException(token, exptokseq, tokenImage);
+  }
+
+  final public void enable_tracing() {
+  }
+
+  final public void disable_tracing() {
+  }
+
+}

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/RccConstants.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/RccConstants.java?rev=399509&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/RccConstants.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/RccConstants.java Wed May  3 19:04:01 2006
@@ -0,0 +1,86 @@
+/* Generated By:JavaCC: Do not edit this line. RccConstants.java */
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.compiler.generated;
+
+/**
+ * Token-kind and lexical-state constants for the generated Rcc parser.
+ * Generated by JavaCC from the rcc grammar; do not edit by hand.
+ */
+public interface RccConstants {
+
+  /** End-of-file token kind. */
+  int EOF = 0;
+  // Keyword token kinds.  Kinds 1-10 (whitespace and comment tokens) are
+  // handled by the lexer and have no named constants; see tokenImage below.
+  int MODULE_TKN = 11;
+  // NOTE(review): the literal image of RECORD_TKN is "class" (tokenImage[12])
+  // — record declarations are spelled with the "class" keyword in the grammar.
+  int RECORD_TKN = 12;
+  int INCLUDE_TKN = 13;
+  // Primitive-type keywords.
+  int BYTE_TKN = 14;
+  int BOOLEAN_TKN = 15;
+  int INT_TKN = 16;
+  int LONG_TKN = 17;
+  int FLOAT_TKN = 18;
+  int DOUBLE_TKN = 19;
+  int USTRING_TKN = 20;
+  int BUFFER_TKN = 21;
+  // Compound-type keywords.
+  int VECTOR_TKN = 22;
+  int MAP_TKN = 23;
+  // Punctuation.
+  int LBRACE_TKN = 24;
+  int RBRACE_TKN = 25;
+  int LT_TKN = 26;
+  int GT_TKN = 27;
+  int SEMICOLON_TKN = 28;
+  int COMMA_TKN = 29;
+  int DOT_TKN = 30;
+  // Literal and identifier tokens.
+  int CSTRING_TKN = 31;
+  int IDENT_TKN = 32;
+
+  // Lexical states of the generated token manager.
+  int DEFAULT = 0;
+  int WithinOneLineComment = 1;
+  int WithinMultiLineComment = 2;
+
+  /** Literal image (or description) of each token kind, indexed by kind. */
+  String[] tokenImage = {
+    "<EOF>",
+    "\" \"",
+    "\"\\t\"",
+    "\"\\n\"",
+    "\"\\r\"",
+    "\"//\"",
+    "<token of kind 6>",
+    "<token of kind 7>",
+    "\"/*\"",
+    "\"*/\"",
+    "<token of kind 10>",
+    "\"module\"",
+    "\"class\"",
+    "\"include\"",
+    "\"byte\"",
+    "\"boolean\"",
+    "\"int\"",
+    "\"long\"",
+    "\"float\"",
+    "\"double\"",
+    "\"ustring\"",
+    "\"buffer\"",
+    "\"vector\"",
+    "\"map\"",
+    "\"{\"",
+    "\"}\"",
+    "\"<\"",
+    "\">\"",
+    "\";\"",
+    "\",\"",
+    "\".\"",
+    "<CSTRING_TKN>",
+    "<IDENT_TKN>",
+  };
+
+}