Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/08 14:31:50 UTC

[11/53] [abbrv] lucenenet git commit: Lucene.Net.Core: Renamed all type-derived properties and methods from Short, Int, Long, and Float to match CLR types Int16, Int32, Int64, and Single, respectively.

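In practical terms the rename maps the Java-derived member names onto the corresponding CLR type names: ReadVInt/WriteVInt become ReadVInt32/WriteVInt32, ReadVLong/WriteVLong become ReadVInt64/WriteVInt64, ReadInt/ReadLong become ReadInt32/ReadInt64, Util.ToIntsRef becomes Util.ToInt32sRef, and array members such as Longs and Ints become Int64s and Int32s. A minimal before/after sketch of calling code, assuming "input" is any Lucene.Net DataInput (illustrative only, not part of the patch):

    // Before this commit (Java-style names):
    int numFields = input.ReadVInt();
    long numTerms = input.ReadVLong();
    long dirOffset = input.ReadLong();

    // After this commit (CLR type names):
    int numFields = input.ReadVInt32();
    long numTerms = input.ReadVInt64();
    long dirOffset = input.ReadInt64();
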
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
index be4641d..a9ecb01 100644
--- a/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/BlockTreeTermsReader.cs
@@ -154,7 +154,7 @@ namespace Lucene.Net.Codecs
                     SeekDir(indexIn, indexDirOffset);
                 }
 
-                int numFields = @in.ReadVInt();
+                int numFields = @in.ReadVInt32();
                 if (numFields < 0)
                 {
                     throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + @in + ")");
@@ -162,19 +162,19 @@ namespace Lucene.Net.Codecs
 
                 for (int i = 0; i < numFields; i++)
                 {
-                    int field = @in.ReadVInt();
-                    long numTerms = @in.ReadVLong();
+                    int field = @in.ReadVInt32();
+                    long numTerms = @in.ReadVInt64();
                     Debug.Assert(numTerms >= 0);
-                    int numBytes = @in.ReadVInt();
+                    int numBytes = @in.ReadVInt32();
                     BytesRef rootCode = new BytesRef(new byte[numBytes]);
                     @in.ReadBytes(rootCode.Bytes, 0, numBytes);
                     rootCode.Length = numBytes;
                     FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
                     Debug.Assert(fieldInfo != null, "field=" + field);
-                    long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVLong();
-                    long sumDocFreq = @in.ReadVLong();
-                    int docCount = @in.ReadVInt();
-                    int longsSize = version >= BlockTreeTermsWriter.VERSION_META_ARRAY ? @in.ReadVInt() : 0;
+                    long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64();
+                    long sumDocFreq = @in.ReadVInt64();
+                    int docCount = @in.ReadVInt32();
+                    int longsSize = version >= BlockTreeTermsWriter.VERSION_META_ARRAY ? @in.ReadVInt32() : 0;
                     if (docCount < 0 || docCount > info.DocCount) // #docs with field must be <= #docs
                     {
                         throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.DocCount + " (resource=" + @in + ")");
@@ -187,7 +187,7 @@ namespace Lucene.Net.Codecs
                     {
                         throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq + " (resource=" + @in + ")");
                     }
-                    long indexStartFP = indexDivisor != -1 ? indexIn.ReadVLong() : 0;
+                    long indexStartFP = indexDivisor != -1 ? indexIn.ReadVInt64() : 0;
 
                     if (fields.ContainsKey(fieldInfo.Name))
                     {
@@ -222,7 +222,7 @@ namespace Lucene.Net.Codecs
             int version = CodecUtil.CheckHeader(input, BlockTreeTermsWriter.TERMS_CODEC_NAME, BlockTreeTermsWriter.VERSION_START, BlockTreeTermsWriter.VERSION_CURRENT);
             if (version < BlockTreeTermsWriter.VERSION_APPEND_ONLY)
             {
-                dirOffset = input.ReadLong();
+                dirOffset = input.ReadInt64();
             }
             return version;
         }
@@ -234,7 +234,7 @@ namespace Lucene.Net.Codecs
             int version = CodecUtil.CheckHeader(input, BlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME, BlockTreeTermsWriter.VERSION_START, BlockTreeTermsWriter.VERSION_CURRENT);
             if (version < BlockTreeTermsWriter.VERSION_APPEND_ONLY)
             {
-                indexDirOffset = input.ReadLong();
+                indexDirOffset = input.ReadInt64();
             }
             return version;
         }
@@ -246,12 +246,12 @@ namespace Lucene.Net.Codecs
             if (version >= BlockTreeTermsWriter.VERSION_CHECKSUM)
             {
                 input.Seek(input.Length - CodecUtil.FooterLength() - 8);
-                dirOffset = input.ReadLong();
+                dirOffset = input.ReadInt64();
             }
             else if (version >= BlockTreeTermsWriter.VERSION_APPEND_ONLY)
             {
                 input.Seek(input.Length - 8);
-                dirOffset = input.ReadLong();
+                dirOffset = input.ReadInt64();
             }
             input.Seek(dirOffset);
         }
@@ -568,7 +568,7 @@ namespace Lucene.Net.Codecs
                 //   System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor);
                 // }
 
-                rootBlockFP = (int)((uint)(new ByteArrayDataInput((byte[])(Array)rootCode.Bytes, rootCode.Offset, rootCode.Length)).ReadVLong() >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
+                rootBlockFP = (int)((uint)(new ByteArrayDataInput((byte[])(Array)rootCode.Bytes, rootCode.Offset, rootCode.Length)).ReadVInt64() >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
 
                 if (indexIn != null)
                 {
@@ -752,9 +752,12 @@ namespace Lucene.Net.Codecs
                     internal readonly BlockTermState termState;
 
                     // metadata buffer, holding monotonic values
+                    /// <summary>
+                    /// NOTE: This was longs (field) in Lucene
+                    /// </summary>
                     [WritableArray]
                     [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
-                    public long[] Longs
+                    public long[] Int64s
                     {
                         get { return longs; }
                         set { longs = value; }
@@ -795,7 +798,7 @@ namespace Lucene.Net.Codecs
 
                         do
                         {
-                            fp = fpOrig + ((int)((uint)floorDataReader.ReadVLong() >> 1));
+                            fp = fpOrig + ((int)((uint)floorDataReader.ReadVInt64() >> 1));
                             numFollowFloorBlocks--;
                             // if (DEBUG) System.out.println("    skip floor block2!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[transitionIndex].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
                             if (numFollowFloorBlocks != 0)
@@ -842,10 +845,10 @@ namespace Lucene.Net.Codecs
                             floorDataReader.Reset(floorData, 0, frameIndexData.Length);
                             // Skip first long -- has redundant fp, hasTerms
                             // flag, isFloor flag
-                            long code = floorDataReader.ReadVLong();
+                            long code = floorDataReader.ReadVInt64();
                             if ((code & BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR) != 0)
                             {
-                                numFollowFloorBlocks = floorDataReader.ReadVInt();
+                                numFollowFloorBlocks = floorDataReader.ReadVInt32();
                                 nextFloorLabel = floorDataReader.ReadByte() & 0xff;
                                 // if (DEBUG) System.out.println("    numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + nextFloorLabel);
 
@@ -856,7 +859,7 @@ namespace Lucene.Net.Codecs
                                     // Maybe skip floor blocks:
                                     while (numFollowFloorBlocks != 0 && nextFloorLabel <= transitions[0].Min)
                                     {
-                                        fp = fpOrig + ((int)((uint)floorDataReader.ReadVLong() >> 1));
+                                        fp = fpOrig + ((int)((uint)floorDataReader.ReadVInt64() >> 1));
                                         numFollowFloorBlocks--;
                                         // if (DEBUG) System.out.println("    skip floor block!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[0].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
                                         if (numFollowFloorBlocks != 0)
@@ -873,13 +876,13 @@ namespace Lucene.Net.Codecs
                         }
 
                         outerInstance.@in.Seek(fp);
-                        int code_ = outerInstance.@in.ReadVInt();
+                        int code_ = outerInstance.@in.ReadVInt32();
                         entCount = (int)((uint)code_ >> 1);
                         Debug.Assert(entCount > 0);
                         isLastInFloor = (code_ & 1) != 0;
 
                         // term suffixes:
-                        code_ = outerInstance.@in.ReadVInt();
+                        code_ = outerInstance.@in.ReadVInt32();
                         isLeafBlock = (code_ & 1) != 0;
                         int numBytes = (int)((uint)code_ >> 1);
                         // if (DEBUG) System.out.println("      entCount=" + entCount + " lastInFloor?=" + isLastInFloor + " leafBlock?=" + isLeafBlock + " numSuffixBytes=" + numBytes);
@@ -891,7 +894,7 @@ namespace Lucene.Net.Codecs
                         suffixesReader.Reset(suffixBytes, 0, numBytes);
 
                         // stats
-                        numBytes = outerInstance.@in.ReadVInt();
+                        numBytes = outerInstance.@in.ReadVInt32();
                         if (statBytes.Length < numBytes)
                         {
                             statBytes = new byte[ArrayUtil.Oversize(numBytes, 1)];
@@ -904,7 +907,7 @@ namespace Lucene.Net.Codecs
                         nextEnt = 0;
 
                         // metadata
-                        numBytes = outerInstance.@in.ReadVInt();
+                        numBytes = outerInstance.@in.ReadVInt32();
                         if (bytes == null)
                         {
                             bytes = new byte[ArrayUtil.Oversize(numBytes, 1)];
@@ -938,7 +941,7 @@ namespace Lucene.Net.Codecs
                         //if (DEBUG) System.out.println("  frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
                         Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp);
                         nextEnt++;
-                        suffix = suffixesReader.ReadVInt();
+                        suffix = suffixesReader.ReadVInt32();
                         startBytePos = suffixesReader.Position;
                         suffixesReader.SkipBytes(suffix);
                         return false;
@@ -949,7 +952,7 @@ namespace Lucene.Net.Codecs
                         //if (DEBUG) System.out.println("  frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
                         Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp);
                         nextEnt++;
-                        int code = suffixesReader.ReadVInt();
+                        int code = suffixesReader.ReadVInt32();
                         suffix = (int)((uint)code >> 1);
                         startBytePos = suffixesReader.Position;
                         suffixesReader.SkipBytes(suffix);
@@ -962,7 +965,7 @@ namespace Lucene.Net.Codecs
                         else
                         {
                             // A sub-block; make sub-FP absolute:
-                            lastSubFP = fp - suffixesReader.ReadVLong();
+                            lastSubFP = fp - suffixesReader.ReadVInt64();
                             return true;
                         }
                     }
@@ -995,17 +998,17 @@ namespace Lucene.Net.Codecs
                             // just skipN here:
 
                             // stats
-                            termState.DocFreq = statsReader.ReadVInt();
+                            termState.DocFreq = statsReader.ReadVInt32();
                             //if (DEBUG) System.out.println("    dF=" + state.docFreq);
                             if (outerInstance.outerInstance.fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                             {
-                                termState.TotalTermFreq = termState.DocFreq + statsReader.ReadVLong();
+                                termState.TotalTermFreq = termState.DocFreq + statsReader.ReadVInt64();
                                 //if (DEBUG) System.out.println("    totTF=" + state.totalTermFreq);
                             }
                             // metadata
                             for (int i = 0; i < outerInstance.outerInstance.longsSize; i++)
                             {
-                                longs[i] = bytesReader.ReadVLong();
+                                longs[i] = bytesReader.ReadVInt64();
                             }
                             outerInstance.outerInstance.outerInstance.postingsReader.DecodeTerm(longs, bytesReader, outerInstance.outerInstance.fieldInfo, termState, absolute);
 
@@ -1760,7 +1763,7 @@ namespace Lucene.Net.Codecs
                 internal Frame PushFrame(FST.Arc<BytesRef> arc, BytesRef frameData, int length)
                 {
                     scratchReader.Reset((byte[])(Array)frameData.Bytes, frameData.Offset, frameData.Length);
-                    long code = scratchReader.ReadVLong();
+                    long code = scratchReader.ReadVInt64();
                     long fpSeek = (long)((ulong)code >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
                     Frame f = GetFrame(1 + currentFrame.ord);
                     f.hasTerms = (code & BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS) != 0;
@@ -2732,9 +2735,12 @@ namespace Lucene.Net.Codecs
                     internal readonly BlockTermState state;
 
                     // metadata buffer, holding monotonic values
+                    /// <summary>
+                    /// NOTE: This was longs (field) in Lucene
+                    /// </summary>
                     [WritableArray]
                     [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
-                    public long[] Longs
+                    public long[] Int64s
                     {
                         get { return longs; }
                         set { longs = value; }
@@ -2771,7 +2777,7 @@ namespace Lucene.Net.Codecs
                         }
                         System.Buffer.BlockCopy(source.Bytes, source.Offset + @in.Position, floorData, 0, numBytes);
                         floorDataReader.Reset(floorData, 0, numBytes);
-                        numFollowFloorBlocks = floorDataReader.ReadVInt();
+                        numFollowFloorBlocks = floorDataReader.ReadVInt32();
                         nextFloorLabel = floorDataReader.ReadByte() & 0xff;
                         //if (DEBUG) {
                         //System.out.println("    setFloorData fpOrig=" + fpOrig + " bytes=" + new BytesRef(source.bytes, source.offset + in.getPosition(), numBytes) + " numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + toHex(nextFloorLabel));
@@ -2822,7 +2828,7 @@ namespace Lucene.Net.Codecs
                         //System.out.println("blc=" + blockLoadCount);
 
                         outerInstance.@in.Seek(fp);
-                        int code = outerInstance.@in.ReadVInt();
+                        int code = outerInstance.@in.ReadVInt32();
                         entCount = (int)((uint)code >> 1);
                         Debug.Assert(entCount > 0);
                         isLastInFloor = (code & 1) != 0;
@@ -2834,7 +2840,7 @@ namespace Lucene.Net.Codecs
                         // we could have simple array of offsets
 
                         // term suffixes:
-                        code = outerInstance.@in.ReadVInt();
+                        code = outerInstance.@in.ReadVInt32();
                         isLeafBlock = (code & 1) != 0;
                         int numBytes = (int)((uint)code >> 1);
                         if (suffixBytes.Length < numBytes)
@@ -2853,7 +2859,7 @@ namespace Lucene.Net.Codecs
                           }*/
 
                         // stats
-                        numBytes = outerInstance.@in.ReadVInt();
+                        numBytes = outerInstance.@in.ReadVInt32();
                         if (statBytes.Length < numBytes)
                         {
                             statBytes = new byte[ArrayUtil.Oversize(numBytes, 1)];
@@ -2869,7 +2875,7 @@ namespace Lucene.Net.Codecs
                         // TODO: we could skip this if !hasTerms; but
                         // that's rare so won't help much
                         // metadata
-                        numBytes = outerInstance.@in.ReadVInt();
+                        numBytes = outerInstance.@in.ReadVInt32();
                         if (bytes == null)
                         {
                             bytes = new byte[ArrayUtil.Oversize(numBytes, 1)];
@@ -2899,7 +2905,7 @@ namespace Lucene.Net.Codecs
                         if (isFloor)
                         {
                             floorDataReader.Rewind();
-                            numFollowFloorBlocks = floorDataReader.ReadVInt();
+                            numFollowFloorBlocks = floorDataReader.ReadVInt32();
                             nextFloorLabel = floorDataReader.ReadByte() & 0xff;
                         }
 
@@ -2949,7 +2955,7 @@ namespace Lucene.Net.Codecs
                         //if (DEBUG) System.out.println("  frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
                         Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp);
                         nextEnt++;
-                        suffix = suffixesReader.ReadVInt();
+                        suffix = suffixesReader.ReadVInt32();
                         startBytePos = suffixesReader.Position;
                         outerInstance.term.Length = prefix + suffix;
                         if (outerInstance.term.Bytes.Length < outerInstance.term.Length)
@@ -2967,7 +2973,7 @@ namespace Lucene.Net.Codecs
                         //if (DEBUG) System.out.println("  frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount);
                         Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp);
                         nextEnt++;
-                        int code = suffixesReader.ReadVInt();
+                        int code = suffixesReader.ReadVInt32();
                         suffix = (int)((uint)code >> 1);
                         startBytePos = suffixesReader.Position;
                         outerInstance.term.Length = prefix + suffix;
@@ -2988,7 +2994,7 @@ namespace Lucene.Net.Codecs
                         {
                             // A sub-block; make sub-FP absolute:
                             outerInstance.termExists = false;
-                            subCode = suffixesReader.ReadVLong();
+                            subCode = suffixesReader.ReadVInt64();
                             lastSubFP = fp - subCode;
                             //if (DEBUG) {
                             //System.out.println("    lastSubFP=" + lastSubFP);
@@ -3029,7 +3035,7 @@ namespace Lucene.Net.Codecs
                         long newFP = fpOrig;
                         while (true)
                         {
-                            long code = floorDataReader.ReadVLong();
+                            long code = floorDataReader.ReadVInt64();
                             newFP = fpOrig + ((long)((ulong)code >> 1));
                             hasTerms = (code & 1) != 0;
                             // if (DEBUG) {
@@ -3099,17 +3105,17 @@ namespace Lucene.Net.Codecs
                             // just skipN here:
 
                             // stats
-                            state.DocFreq = statsReader.ReadVInt();
+                            state.DocFreq = statsReader.ReadVInt32();
                             //if (DEBUG) System.out.println("    dF=" + state.docFreq);
                             if (outerInstance.outerInstance.fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                             {
-                                state.TotalTermFreq = state.DocFreq + statsReader.ReadVLong();
+                                state.TotalTermFreq = state.DocFreq + statsReader.ReadVInt64();
                                 //if (DEBUG) System.out.println("    totTF=" + state.totalTermFreq);
                             }
                             // metadata
                             for (int i = 0; i < outerInstance.outerInstance.longsSize; i++)
                             {
-                                longs[i] = bytesReader.ReadVLong();
+                                longs[i] = bytesReader.ReadVInt64();
                             }
                             outerInstance.outerInstance.outerInstance.postingsReader.DecodeTerm(longs, bytesReader, outerInstance.outerInstance.fieldInfo, state, absolute);
 
@@ -3153,12 +3159,12 @@ namespace Lucene.Net.Codecs
                         {
                             Debug.Assert(nextEnt < entCount);
                             nextEnt++;
-                            int code = suffixesReader.ReadVInt();
+                            int code = suffixesReader.ReadVInt32();
                             suffixesReader.SkipBytes(isLeafBlock ? code : (int)((uint)code >> 1));
                             //if (DEBUG) System.out.println("    " + nextEnt + " (of " + entCount + ") ent isSubBlock=" + ((code&1)==1));
                             if ((code & 1) != 0)
                             {
-                                long subCode = suffixesReader.ReadVLong();
+                                long subCode = suffixesReader.ReadVInt64();
                                 //if (DEBUG) System.out.println("      subCode=" + subCode);
                                 if (targetSubCode == subCode)
                                 {
@@ -3212,7 +3218,7 @@ namespace Lucene.Net.Codecs
                         {
                             nextEnt++;
 
-                            suffix = suffixesReader.ReadVInt();
+                            suffix = suffixesReader.ReadVInt32();
 
                             // if (DEBUG) {
                             //   BytesRef suffixBytesRef = new BytesRef();
@@ -3356,7 +3362,7 @@ namespace Lucene.Net.Codecs
                         {
                             nextEnt++;
 
-                            int code = suffixesReader.ReadVInt();
+                            int code = suffixesReader.ReadVInt32();
                             suffix = (int)((uint)code >> 1);
                             // if (DEBUG) {
                             //   BytesRef suffixBytesRef = new BytesRef();
@@ -3377,7 +3383,7 @@ namespace Lucene.Net.Codecs
                             }
                             else
                             {
-                                subCode = suffixesReader.ReadVLong();
+                                subCode = suffixesReader.ReadVInt64();
                                 lastSubFP = fp - subCode;
                             }
 

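For context on the naming, the V-prefixed methods touched throughout this file read and write Lucene's variable-length integer encoding: 7 payload bits per byte, with the high bit set while more bytes follow, so small values take a single byte. A minimal sketch of that encoding over a plain System.IO.Stream, assuming nothing from Lucene.Net itself (the real methods live on DataInput/DataOutput):

    using System.IO;

    internal static class VInt32Sketch
    {
        // Write 7 bits per byte, low-order bits first; the high bit marks continuation.
        public static void WriteVInt32(Stream output, int value)
        {
            uint v = (uint)value;
            while (v > 0x7F)
            {
                output.WriteByte((byte)((v & 0x7F) | 0x80));
                v >>= 7;
            }
            output.WriteByte((byte)v);
        }

        // Read the same encoding back; ReadVInt64 is the same loop widened to 64 bits.
        public static int ReadVInt32(Stream input)
        {
            int b = input.ReadByte();
            int value = b & 0x7F;
            for (int shift = 7; (b & 0x80) != 0; shift += 7)
            {
                b = input.ReadByte();
                value |= (b & 0x7F) << shift;
            }
            return value;
        }
    }
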
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/BlockTreeTermsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net.Core/Codecs/BlockTreeTermsWriter.cs
index 05c55b2..4541d1d 100644
--- a/src/Lucene.Net.Core/Codecs/BlockTreeTermsWriter.cs
+++ b/src/Lucene.Net.Core/Codecs/BlockTreeTermsWriter.cs
@@ -246,7 +246,11 @@ namespace Lucene.Net.Codecs
             public long SumTotalTermFreq { get; private set; }
             public long SumDocFreq { get; private set; }
             public int DocCount { get; private set; }
-            internal int LongsSize { get; private set; }
+
+            /// <summary>
+            /// NOTE: This was longsSize (field) in Lucene
+            /// </summary>
+            internal int Int64sSize { get; private set; }
 
             public FieldMetaData(FieldInfo fieldInfo, BytesRef rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize)
             {
@@ -259,7 +263,7 @@ namespace Lucene.Net.Codecs
                 this.SumTotalTermFreq = sumTotalTermFreq;
                 this.SumDocFreq = sumDocFreq;
                 this.DocCount = docCount;
-                this.LongsSize = longsSize;
+                this.Int64sSize = longsSize;
             }
         }
 
@@ -345,14 +349,14 @@ namespace Lucene.Net.Codecs
         /// Writes the terms file trailer. </summary>
         protected internal virtual void WriteTrailer(IndexOutput @out, long dirStart)
         {
-            @out.WriteLong(dirStart);
+            @out.WriteInt64(dirStart);
         }
 
         /// <summary>
         /// Writes the index file trailer. </summary>
         protected internal virtual void WriteIndexTrailer(IndexOutput indexOut, long dirStart)
         {
-            indexOut.WriteLong(dirStart);
+            indexOut.WriteInt64(dirStart);
         }
 
         public override TermsConsumer AddField(FieldInfo field)
@@ -436,10 +440,10 @@ namespace Lucene.Net.Codecs
                 // TODO: try writing the leading vLong in MSB order
                 // (opposite of what Lucene does today), for better
                 // outputs sharing in the FST
-                scratchBytes.WriteVLong(EncodeOutput(Fp, HasTerms, IsFloor));
+                scratchBytes.WriteVInt64(EncodeOutput(Fp, HasTerms, IsFloor));
                 if (IsFloor)
                 {
-                    scratchBytes.WriteVInt(floorBlocks.Count);
+                    scratchBytes.WriteVInt32(floorBlocks.Count);
                     foreach (PendingBlock sub in floorBlocks)
                     {
                         Debug.Assert(sub.FloorLeadByte != -1);
@@ -448,7 +452,7 @@ namespace Lucene.Net.Codecs
                         //}
                         scratchBytes.WriteByte((byte)(sbyte)sub.FloorLeadByte);
                         Debug.Assert(sub.Fp > Fp);
-                        scratchBytes.WriteVLong((sub.Fp - Fp) << 1 | (sub.HasTerms ? 1 : 0));
+                        scratchBytes.WriteVInt64((sub.Fp - Fp) << 1 | (sub.HasTerms ? 1 : 0));
                     }
                 }
 
@@ -457,7 +461,7 @@ namespace Lucene.Net.Codecs
                 var bytes = new byte[(int)scratchBytes.FilePointer];
                 Debug.Assert(bytes.Length > 0);
                 scratchBytes.WriteTo(bytes, 0);
-                indexBuilder.Add(Util.ToIntsRef(Prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.Length));
+                indexBuilder.Add(Util.ToInt32sRef(Prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.Length));
                 scratchBytes.Reset();
 
                 // Copy over index for all sub-blocks
@@ -508,7 +512,7 @@ namespace Lucene.Net.Codecs
                     //if (DEBUG) {
                     //  System.out.println("      add sub=" + indexEnt.input + " " + indexEnt.input + " output=" + indexEnt.output);
                     //}
-                    builder.Add(Util.ToIntsRef(indexEnt.Input, scratchIntsRef), indexEnt.Output);
+                    builder.Add(Util.ToInt32sRef(indexEnt.Input, scratchIntsRef), indexEnt.Output);
                 }
             }
         }
@@ -647,7 +651,7 @@ namespace Lucene.Net.Codecs
                     //}
                     //System.out.println("\nwbs count=" + count);
 
-                    int savLabel = prevTerm.Ints[prevTerm.Offset + prefixLength];
+                    int savLabel = prevTerm.Int32s[prevTerm.Offset + prefixLength];
 
                     // Count up how many items fall under
                     // each unique label after the prefix.
@@ -791,7 +795,7 @@ namespace Lucene.Net.Codecs
                             {
                                 curPrefixLength = 1 + prefixLength;
                                 // floor term:
-                                prevTerm.Ints[prevTerm.Offset + prefixLength] = startLabel;
+                                prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel;
                             }
                             //System.out.println("  " + subCount + " subs");
                             PendingBlock floorBlock = WriteBlock(prevTerm, prefixLength, curPrefixLength, curStart, pendingCount, subTermCountSums[1 + sub], true, startLabel, curStart == pendingCount);
@@ -824,7 +828,7 @@ namespace Lucene.Net.Codecs
                                 // here
                                 Debug.Assert(startLabel != -1);
                                 Debug.Assert(firstBlock != null);
-                                prevTerm.Ints[prevTerm.Offset + prefixLength] = startLabel;
+                                prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel;
                                 //System.out.println("  final " + (numSubs-sub-1) + " subs");
                                 /*
                                 for(sub++;sub < numSubs;sub++) {
@@ -841,7 +845,7 @@ namespace Lucene.Net.Codecs
                         }
                     }
 
-                    prevTerm.Ints[prevTerm.Offset + prefixLength] = savLabel;
+                    prevTerm.Int32s[prevTerm.Offset + prefixLength] = savLabel;
 
                     Debug.Assert(firstBlock != null);
                     firstBlock.CompileIndex(floorBlocks, outerInstance.scratchBytes);
@@ -885,12 +889,12 @@ namespace Lucene.Net.Codecs
                 BytesRef prefix = new BytesRef(indexPrefixLength);
                 for (int m = 0; m < indexPrefixLength; m++)
                 {
-                    prefix.Bytes[m] = (byte)prevTerm.Ints[m];
+                    prefix.Bytes[m] = (byte)prevTerm.Int32s[m];
                 }
                 prefix.Length = indexPrefixLength;
 
                 // Write block header:
-                outerInstance.@out.WriteVInt((length << 1) | (isLastInFloor ? 1 : 0));
+                outerInstance.@out.WriteVInt32((length << 1) | (isLastInFloor ? 1 : 0));
 
                 // 1st pass: pack term suffix bytes into byte[] blob
                 // TODO: cutover to bulk int codec... simple64?
@@ -946,15 +950,15 @@ namespace Lucene.Net.Codecs
                         //   System.out.println("    write term suffix=" + suffixBytes);
                         // }
                         // For leaf block we write suffix straight
-                        suffixWriter.WriteVInt(suffix);
+                        suffixWriter.WriteVInt32(suffix);
                         suffixWriter.WriteBytes(term.Term.Bytes, prefixLength, suffix);
 
                         // Write term stats, to separate byte[] blob:
-                        statsWriter.WriteVInt(state.DocFreq);
+                        statsWriter.WriteVInt32(state.DocFreq);
                         if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                         {
                             Debug.Assert(state.TotalTermFreq >= state.DocFreq, state.TotalTermFreq + " vs " + state.DocFreq);
-                            statsWriter.WriteVLong(state.TotalTermFreq - state.DocFreq);
+                            statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq);
                         }
 
                         // Write term meta data
@@ -962,7 +966,7 @@ namespace Lucene.Net.Codecs
                         for (int pos = 0; pos < longsSize; pos++)
                         {
                             Debug.Assert(longs[pos] >= 0);
-                            metaWriter.WriteVLong(longs[pos]);
+                            metaWriter.WriteVInt64(longs[pos]);
                         }
                         bytesWriter.WriteTo(metaWriter);
                         bytesWriter.Reset();
@@ -989,15 +993,15 @@ namespace Lucene.Net.Codecs
                             // }
                             // For non-leaf block we borrow 1 bit to record
                             // if entry is term or sub-block
-                            suffixWriter.WriteVInt(suffix << 1);
+                            suffixWriter.WriteVInt32(suffix << 1);
                             suffixWriter.WriteBytes(term.Term.Bytes, prefixLength, suffix);
 
                             // Write term stats, to separate byte[] blob:
-                            statsWriter.WriteVInt(state.DocFreq);
+                            statsWriter.WriteVInt32(state.DocFreq);
                             if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                             {
                                 Debug.Assert(state.TotalTermFreq >= state.DocFreq);
-                                statsWriter.WriteVLong(state.TotalTermFreq - state.DocFreq);
+                                statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq);
                             }
 
                             // TODO: now that terms dict "sees" these longs,
@@ -1013,7 +1017,7 @@ namespace Lucene.Net.Codecs
                             for (int pos = 0; pos < longsSize; pos++)
                             {
                                 Debug.Assert(longs[pos] >= 0);
-                                metaWriter.WriteVLong(longs[pos]);
+                                metaWriter.WriteVInt64(longs[pos]);
                             }
                             bytesWriter.WriteTo(metaWriter);
                             bytesWriter.Reset();
@@ -1030,7 +1034,7 @@ namespace Lucene.Net.Codecs
 
                             // For non-leaf block we borrow 1 bit to record
                             // if entry is term or sub-block
-                            suffixWriter.WriteVInt((suffix << 1) | 1);
+                            suffixWriter.WriteVInt32((suffix << 1) | 1);
                             suffixWriter.WriteBytes(block.Prefix.Bytes, prefixLength, suffix);
                             Debug.Assert(block.Fp < startFP);
 
@@ -1041,7 +1045,7 @@ namespace Lucene.Net.Codecs
                             //   System.out.println("    write sub-block suffix=" + toString(suffixBytes) + " subFP=" + block.fp + " subCode=" + (startFP-block.fp) + " floor=" + block.isFloor);
                             // }
 
-                            suffixWriter.WriteVLong(startFP - block.Fp);
+                            suffixWriter.WriteVInt64(startFP - block.Fp);
                             subIndices.Add(block.Index);
                         }
                     }
@@ -1054,17 +1058,17 @@ namespace Lucene.Net.Codecs
                 // search on lookup
 
                 // Write suffixes byte[] blob to terms dict output:
-                outerInstance.@out.WriteVInt((int)(suffixWriter.FilePointer << 1) | (isLeafBlock ? 1 : 0));
+                outerInstance.@out.WriteVInt32((int)(suffixWriter.FilePointer << 1) | (isLeafBlock ? 1 : 0));
                 suffixWriter.WriteTo(outerInstance.@out);
                 suffixWriter.Reset();
 
                 // Write term stats byte[] blob
-                outerInstance.@out.WriteVInt((int)statsWriter.FilePointer);
+                outerInstance.@out.WriteVInt32((int)statsWriter.FilePointer);
                 statsWriter.WriteTo(outerInstance.@out);
                 statsWriter.Reset();
 
                 // Write term meta data byte[] blob
-                outerInstance.@out.WriteVInt((int)metaWriter.FilePointer);
+                outerInstance.@out.WriteVInt32((int)metaWriter.FilePointer);
                 metaWriter.WriteTo(outerInstance.@out);
                 metaWriter.Reset();
 
@@ -1134,7 +1138,7 @@ namespace Lucene.Net.Codecs
                 Debug.Assert(stats.DocFreq > 0);
                 //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq);
 
-                blockBuilder.Add(Util.ToIntsRef(text, scratchIntsRef), noOutputs.NoOutput);
+                blockBuilder.Add(Util.ToInt32sRef(text, scratchIntsRef), noOutputs.NoOutput);
                 BlockTermState state = outerInstance.postingsWriter.NewTermState();
                 state.DocFreq = stats.DocFreq;
                 state.TotalTermFreq = stats.TotalTermFreq;
@@ -1199,23 +1203,23 @@ namespace Lucene.Net.Codecs
                 long dirStart = @out.FilePointer;
                 long indexDirStart = indexOut.FilePointer;
 
-                @out.WriteVInt(fields.Count);
+                @out.WriteVInt32(fields.Count);
 
                 foreach (FieldMetaData field in fields)
                 {
                     //System.out.println("  field " + field.fieldInfo.name + " " + field.numTerms + " terms");
-                    @out.WriteVInt(field.FieldInfo.Number);
-                    @out.WriteVLong(field.NumTerms);
-                    @out.WriteVInt(field.RootCode.Length);
+                    @out.WriteVInt32(field.FieldInfo.Number);
+                    @out.WriteVInt64(field.NumTerms);
+                    @out.WriteVInt32(field.RootCode.Length);
                     @out.WriteBytes(field.RootCode.Bytes, field.RootCode.Offset, field.RootCode.Length);
                     if (field.FieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                     {
-                        @out.WriteVLong(field.SumTotalTermFreq);
+                        @out.WriteVInt64(field.SumTotalTermFreq);
                     }
-                    @out.WriteVLong(field.SumDocFreq);
-                    @out.WriteVInt(field.DocCount);
-                    @out.WriteVInt(field.LongsSize);
-                    indexOut.WriteVLong(field.IndexStartFP);
+                    @out.WriteVInt64(field.SumDocFreq);
+                    @out.WriteVInt32(field.DocCount);
+                    @out.WriteVInt32(field.Int64sSize);
+                    indexOut.WriteVInt64(field.IndexStartFP);
                 }
                 WriteTrailer(@out, dirStart);
                 CodecUtil.WriteFooter(@out);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/CodecUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/CodecUtil.cs b/src/Lucene.Net.Core/Codecs/CodecUtil.cs
index a578397..8c565e8 100644
--- a/src/Lucene.Net.Core/Codecs/CodecUtil.cs
+++ b/src/Lucene.Net.Core/Codecs/CodecUtil.cs
@@ -79,9 +79,9 @@ namespace Lucene.Net.Codecs
             {
                 throw new System.ArgumentException("codec must be simple ASCII, less than 128 characters in length [got " + codec + "]");
             }
-            @out.WriteInt(CODEC_MAGIC);
+            @out.WriteInt32(CODEC_MAGIC);
             @out.WriteString(codec);
-            @out.WriteInt(version);
+            @out.WriteInt32(version);
         }
 
         /// <summary>
@@ -124,7 +124,7 @@ namespace Lucene.Net.Codecs
         public static int CheckHeader(DataInput @in, string codec, int minVersion, int maxVersion)
         {
             // Safety to guard against reading a bogus string:
-            int actualHeader = @in.ReadInt();
+            int actualHeader = @in.ReadInt32();
             if (actualHeader != CODEC_MAGIC)
             {
                 throw new System.IO.IOException("codec header mismatch: actual header=" + actualHeader + " vs expected header=" + CODEC_MAGIC + " (resource: " + @in + ")");
@@ -146,7 +146,7 @@ namespace Lucene.Net.Codecs
                 throw new System.IO.IOException("codec mismatch: actual codec=" + actualCodec + " vs expected codec=" + codec + " (resource: " + @in + ")");
             }
 
-            int actualVersion = @in.ReadInt();
+            int actualVersion = @in.ReadInt32();
             if (actualVersion < minVersion)
             {
                 throw new System.IO.IOException("Version: " + actualVersion + " is not supported. Minimum Version number is " + minVersion + ".");
@@ -181,9 +181,9 @@ namespace Lucene.Net.Codecs
         /// <exception cref="IOException"> If there is an I/O error writing to the underlying medium. </exception>
         public static void WriteFooter(IndexOutput @out)
         {
-            @out.WriteInt(FOOTER_MAGIC);
-            @out.WriteInt(0);
-            @out.WriteLong(@out.Checksum);
+            @out.WriteInt32(FOOTER_MAGIC);
+            @out.WriteInt32(0);
+            @out.WriteInt64(@out.Checksum);
         }
 
         /// <summary>
@@ -206,7 +206,7 @@ namespace Lucene.Net.Codecs
         {
             ValidateFooter(@in);
             long actualChecksum = @in.Checksum;
-            long expectedChecksum = @in.ReadLong();
+            long expectedChecksum = @in.ReadInt64();
             if (expectedChecksum != actualChecksum)
             {
                 throw new System.IO.IOException("checksum failed (hardware problem?) : expected=" + expectedChecksum.ToString("x") + " actual=" + actualChecksum.ToString("x") + " (resource=" + @in + ")");
@@ -226,18 +226,18 @@ namespace Lucene.Net.Codecs
         {
             @in.Seek(@in.Length - FooterLength());
             ValidateFooter(@in);
-            return @in.ReadLong();
+            return @in.ReadInt64();
         }
 
         private static void ValidateFooter(IndexInput @in)
         {
-            int magic = @in.ReadInt();
+            int magic = @in.ReadInt32();
             if (magic != FOOTER_MAGIC)
             {
                 throw new System.IO.IOException("codec footer mismatch: actual footer=" + magic + " vs expected footer=" + FOOTER_MAGIC + " (resource: " + @in + ")");
             }
 
-            int algorithmID = @in.ReadInt();
+            int algorithmID = @in.ReadInt32();
             if (algorithmID != 0)
             {
                 throw new System.IO.IOException("codec footer mismatch: unknown algorithmID: " + algorithmID);

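The CodecUtil hunks above also spell out the on-disk layout these renamed calls produce: a header is a 32-bit magic, the codec name string and a 32-bit version, while a footer is a 32-bit magic, a 32-bit algorithm id (only 0 is defined) and a 64-bit checksum. A hedged sketch of reading that 16-byte footer with a plain BinaryReader, just to illustrate the field order (the real code goes through IndexInput/ChecksumIndexInput as shown above, and Lucene stores these values big-endian, which BinaryReader does not account for):

    using System.IO;

    internal static class FooterSketch
    {
        // "expectedMagic" stands in for CodecUtil.FOOTER_MAGIC; its value is not repeated here.
        public static long ReadChecksum(BinaryReader reader, int expectedMagic)
        {
            int magic = reader.ReadInt32();        // FOOTER_MAGIC
            if (magic != expectedMagic)
                throw new IOException("codec footer mismatch: actual footer=" + magic);

            int algorithmID = reader.ReadInt32();  // only algorithm 0 is defined
            if (algorithmID != 0)
                throw new IOException("codec footer mismatch: unknown algorithmID: " + algorithmID);

            return reader.ReadInt64();             // checksum stored by WriteFooter
        }
    }
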
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
index 606267e..7a07078 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
@@ -57,13 +57,13 @@ namespace Lucene.Net.Codecs.Compressing
             PackedInts.Reader[] docBasesDeltas = new PackedInts.Reader[16];
             PackedInts.Reader[] startPointersDeltas = new PackedInts.Reader[16];
 
-            int packedIntsVersion = fieldsIndexIn.ReadVInt();
+            int packedIntsVersion = fieldsIndexIn.ReadVInt32();
 
             int blockCount = 0;
 
             for (; ; )
             {
-                int numChunks = fieldsIndexIn.ReadVInt();
+                int numChunks = fieldsIndexIn.ReadVInt32();
                 if (numChunks == 0)
                 {
                     break;
@@ -80,9 +80,9 @@ namespace Lucene.Net.Codecs.Compressing
                 }
 
                 // doc bases
-                docBases[blockCount] = fieldsIndexIn.ReadVInt();
-                avgChunkDocs[blockCount] = fieldsIndexIn.ReadVInt();
-                int bitsPerDocBase = fieldsIndexIn.ReadVInt();
+                docBases[blockCount] = fieldsIndexIn.ReadVInt32();
+                avgChunkDocs[blockCount] = fieldsIndexIn.ReadVInt32();
+                int bitsPerDocBase = fieldsIndexIn.ReadVInt32();
                 if (bitsPerDocBase > 32)
                 {
                     throw new CorruptIndexException("Corrupted bitsPerDocBase (resource=" + fieldsIndexIn + ")");
@@ -90,9 +90,9 @@ namespace Lucene.Net.Codecs.Compressing
                 docBasesDeltas[blockCount] = PackedInts.GetReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerDocBase);
 
                 // start pointers
-                startPointers[blockCount] = fieldsIndexIn.ReadVLong();
-                avgChunkSizes[blockCount] = fieldsIndexIn.ReadVLong();
-                int bitsPerStartPointer = fieldsIndexIn.ReadVInt();
+                startPointers[blockCount] = fieldsIndexIn.ReadVInt64();
+                avgChunkSizes[blockCount] = fieldsIndexIn.ReadVInt64();
+                int bitsPerStartPointer = fieldsIndexIn.ReadVInt32();
                 if (bitsPerStartPointer > 64)
                 {
                     throw new CorruptIndexException("Corrupted bitsPerStartPointer (resource=" + fieldsIndexIn + ")");

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
index 101cc79..6d7ce28 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
@@ -90,7 +90,7 @@ namespace Lucene.Net.Codecs.Compressing
             totalDocs = 0;
             docBaseDeltas = new int[BLOCK_SIZE];
             startPointerDeltas = new long[BLOCK_SIZE];
-            fieldsIndexOut.WriteVInt(PackedInts.VERSION_CURRENT);
+            fieldsIndexOut.WriteVInt32(PackedInts.VERSION_CURRENT);
         }
 
         private void Reset()
@@ -103,7 +103,7 @@ namespace Lucene.Net.Codecs.Compressing
         private void WriteBlock()
         {
             Debug.Assert(blockChunks > 0);
-            fieldsIndexOut.WriteVInt(blockChunks);
+            fieldsIndexOut.WriteVInt32(blockChunks);
 
             // The trick here is that we only store the difference from the average start
             // pointer or doc base, this helps save bits per value.
@@ -122,8 +122,8 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 avgChunkDocs = (int)Math.Round((float)(blockDocs - docBaseDeltas[blockChunks - 1]) / (blockChunks - 1));
             }
-            fieldsIndexOut.WriteVInt(totalDocs - blockDocs); // docBase
-            fieldsIndexOut.WriteVInt(avgChunkDocs);
+            fieldsIndexOut.WriteVInt32(totalDocs - blockDocs); // docBase
+            fieldsIndexOut.WriteVInt32(avgChunkDocs);
             int docBase = 0;
             long maxDelta = 0;
             for (int i = 0; i < blockChunks; ++i)
@@ -134,7 +134,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
 
             int bitsPerDocBase = PackedInts.BitsRequired(maxDelta);
-            fieldsIndexOut.WriteVInt(bitsPerDocBase);
+            fieldsIndexOut.WriteVInt32(bitsPerDocBase);
             PackedInts.Writer writer = PackedInts.GetWriterNoHeader(fieldsIndexOut, PackedInts.Format.PACKED, blockChunks, bitsPerDocBase, 1);
             docBase = 0;
             for (int i = 0; i < blockChunks; ++i)
@@ -147,7 +147,7 @@ namespace Lucene.Net.Codecs.Compressing
             writer.Finish();
 
             // start pointers
-            fieldsIndexOut.WriteVLong(firstStartPointer);
+            fieldsIndexOut.WriteVInt64(firstStartPointer);
             long avgChunkSize;
             if (blockChunks == 1)
             {
@@ -157,7 +157,7 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 avgChunkSize = (maxStartPointer - firstStartPointer) / (blockChunks - 1);
             }
-            fieldsIndexOut.WriteVLong(avgChunkSize);
+            fieldsIndexOut.WriteVInt64(avgChunkSize);
             long startPointer = 0;
             maxDelta = 0;
             for (int i = 0; i < blockChunks; ++i)
@@ -168,7 +168,7 @@ namespace Lucene.Net.Codecs.Compressing
             }
 
             int bitsPerStartPointer = PackedInts.BitsRequired(maxDelta);
-            fieldsIndexOut.WriteVInt(bitsPerStartPointer);
+            fieldsIndexOut.WriteVInt32(bitsPerStartPointer);
             writer = PackedInts.GetWriterNoHeader(fieldsIndexOut, PackedInts.Format.PACKED, blockChunks, bitsPerStartPointer, 1);
             startPointer = 0;
             for (int i = 0; i < blockChunks; ++i)
@@ -214,8 +214,8 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 WriteBlock();
             }
-            fieldsIndexOut.WriteVInt(0); // end marker
-            fieldsIndexOut.WriteVLong(maxPointer);
+            fieldsIndexOut.WriteVInt32(0); // end marker
+            fieldsIndexOut.WriteVInt64(maxPointer);
             CodecUtil.WriteFooter(fieldsIndexOut);
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 83dc1d8..7d82d35 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -105,7 +105,7 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
                 {
-                    maxPointer = indexStream.ReadVLong();
+                    maxPointer = indexStream.ReadVInt64();
                     CodecUtil.CheckFooter(indexStream);
                 }
                 else
@@ -141,13 +141,13 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS)
                 {
-                    chunkSize = fieldsStream.ReadVInt();
+                    chunkSize = fieldsStream.ReadVInt32();
                 }
                 else
                 {
                     chunkSize = -1;
                 }
-                packedIntsVersion = fieldsStream.ReadVInt();
+                packedIntsVersion = fieldsStream.ReadVInt32();
                 decompressor = compressionMode.NewDecompressor();
                 this.bytes = new BytesRef();
 
@@ -188,14 +188,14 @@ namespace Lucene.Net.Codecs.Compressing
             switch (bits & CompressingStoredFieldsWriter.TYPE_MASK)
             {
                 case CompressingStoredFieldsWriter.BYTE_ARR:
-                    int length = @in.ReadVInt();
+                    int length = @in.ReadVInt32();
                     var data = new byte[length];
                     @in.ReadBytes(data, 0, length);
                     visitor.BinaryField(info, data);
                     break;
 
                 case CompressingStoredFieldsWriter.STRING:
-                    length = @in.ReadVInt();
+                    length = @in.ReadVInt32();
                     data = new byte[length];
                     @in.ReadBytes(data, 0, length);
 #pragma warning disable 612, 618
@@ -204,19 +204,19 @@ namespace Lucene.Net.Codecs.Compressing
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_INT:
-                    visitor.Int32Field(info, @in.ReadInt());
+                    visitor.Int32Field(info, @in.ReadInt32());
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_FLOAT:
-                    visitor.SingleField(info, Number.IntBitsToFloat(@in.ReadInt()));
+                    visitor.SingleField(info, Number.Int32BitsToSingle(@in.ReadInt32()));
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_LONG:
-                    visitor.Int64Field(info, @in.ReadLong());
+                    visitor.Int64Field(info, @in.ReadInt64());
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
-                    visitor.DoubleField(info, BitConverter.Int64BitsToDouble(@in.ReadLong()));
+                    visitor.DoubleField(info, BitConverter.Int64BitsToDouble(@in.ReadInt64()));
                     break;
 
                 default:
@@ -230,18 +230,18 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 case CompressingStoredFieldsWriter.BYTE_ARR:
                 case CompressingStoredFieldsWriter.STRING:
-                    int length = @in.ReadVInt();
+                    int length = @in.ReadVInt32();
                     @in.SkipBytes(length);
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_INT:
                 case CompressingStoredFieldsWriter.NUMERIC_FLOAT:
-                    @in.ReadInt();
+                    @in.ReadInt32();
                     break;
 
                 case CompressingStoredFieldsWriter.NUMERIC_LONG:
                 case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
-                    @in.ReadLong();
+                    @in.ReadInt64();
                     break;
 
                 default:
@@ -253,8 +253,8 @@ namespace Lucene.Net.Codecs.Compressing
         {
             fieldsStream.Seek(indexReader.GetStartPointer(docID));
 
-            int docBase = fieldsStream.ReadVInt();
-            int chunkDocs = fieldsStream.ReadVInt();
+            int docBase = fieldsStream.ReadVInt32();
+            int chunkDocs = fieldsStream.ReadVInt32();
             if (docID < docBase || docID >= docBase + chunkDocs || docBase + chunkDocs > numDocs)
             {
                 throw new CorruptIndexException("Corrupted: docID=" + docID + ", docBase=" + docBase + ", chunkDocs=" + chunkDocs + ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
@@ -263,17 +263,17 @@ namespace Lucene.Net.Codecs.Compressing
             int numStoredFields, offset, length, totalLength;
             if (chunkDocs == 1)
             {
-                numStoredFields = fieldsStream.ReadVInt();
+                numStoredFields = fieldsStream.ReadVInt32();
                 offset = 0;
-                length = fieldsStream.ReadVInt();
+                length = fieldsStream.ReadVInt32();
                 totalLength = length;
             }
             else
             {
-                int bitsPerStoredFields = fieldsStream.ReadVInt();
+                int bitsPerStoredFields = fieldsStream.ReadVInt32();
                 if (bitsPerStoredFields == 0)
                 {
-                    numStoredFields = fieldsStream.ReadVInt();
+                    numStoredFields = fieldsStream.ReadVInt32();
                 }
                 else if (bitsPerStoredFields > 31)
                 {
@@ -287,10 +287,10 @@ namespace Lucene.Net.Codecs.Compressing
                     fieldsStream.Seek(filePointer + PackedInts.Format.PACKED.ByteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
                 }
 
-                int bitsPerLength = fieldsStream.ReadVInt();
+                int bitsPerLength = fieldsStream.ReadVInt32();
                 if (bitsPerLength == 0)
                 {
-                    length = fieldsStream.ReadVInt();
+                    length = fieldsStream.ReadVInt32();
                     offset = (docID - docBase) * length;
                     totalLength = chunkDocs * length;
                 }
@@ -346,7 +346,7 @@ namespace Lucene.Net.Codecs.Compressing
 
             for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
             {
-                long infoAndBits = documentInput.ReadVLong();
+                long infoAndBits = documentInput.ReadVInt64();
                 int fieldNumber = (int)((long)((ulong)infoAndBits >> CompressingStoredFieldsWriter.TYPE_BITS));
                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
 
@@ -507,8 +507,8 @@ namespace Lucene.Net.Codecs.Compressing
                 Debug.Assert(doc >= this.docBase + this.chunkDocs, doc + " " + this.docBase + " " + this.chunkDocs);
                 fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(doc));
 
-                int docBase = fieldsStream.ReadVInt();
-                int chunkDocs = fieldsStream.ReadVInt();
+                int docBase = fieldsStream.ReadVInt32();
+                int chunkDocs = fieldsStream.ReadVInt32();
                 if (docBase < this.docBase + this.chunkDocs || docBase + chunkDocs > outerInstance.numDocs)
                 {
                     throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase + ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase + ", new numDocs=" + chunkDocs + " (resource=" + fieldsStream + ")");
@@ -525,15 +525,15 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (chunkDocs == 1)
                 {
-                    numStoredFields[0] = fieldsStream.ReadVInt();
-                    lengths[0] = fieldsStream.ReadVInt();
+                    numStoredFields[0] = fieldsStream.ReadVInt32();
+                    lengths[0] = fieldsStream.ReadVInt32();
                 }
                 else
                 {
-                    int bitsPerStoredFields = fieldsStream.ReadVInt();
+                    int bitsPerStoredFields = fieldsStream.ReadVInt32();
                     if (bitsPerStoredFields == 0)
                     {
-                        Arrays.Fill(numStoredFields, 0, chunkDocs, fieldsStream.ReadVInt());
+                        Arrays.Fill(numStoredFields, 0, chunkDocs, fieldsStream.ReadVInt32());
                     }
                     else if (bitsPerStoredFields > 31)
                     {
@@ -548,10 +548,10 @@ namespace Lucene.Net.Codecs.Compressing
                         }
                     }
 
-                    int bitsPerLength = fieldsStream.ReadVInt();
+                    int bitsPerLength = fieldsStream.ReadVInt32();
                     if (bitsPerLength == 0)
                     {
-                        Arrays.Fill(lengths, 0, chunkDocs, fieldsStream.ReadVInt());
+                        Arrays.Fill(lengths, 0, chunkDocs, fieldsStream.ReadVInt32());
                     }
                     else if (bitsPerLength > 31)
                     {
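
The ReadVInt32 calls in this chunk header (docBase, chunkDocs, numStoredFields, lengths) read Lucene's variable-length integer encoding: seven data bits per byte, with the high bit marking that another byte follows. A minimal standalone C# sketch of that encoding, for illustration only -- not the Lucene.Net DataInput/DataOutput implementation, and all names below are hypothetical:

using System;
using System.IO;

// Illustrative sketch of the VInt32 format (7 data bits per byte, high bit = "more follows").
// A plain Stream stands in for DataInput/DataOutput.
internal static class VInt32Sketch
{
    public static void WriteVInt32(Stream output, int value)
    {
        uint v = (uint)value;                            // treat as unsigned so negatives take 5 bytes
        while (v >= 0x80)
        {
            output.WriteByte((byte)((v & 0x7F) | 0x80)); // low 7 bits, continuation bit set
            v >>= 7;
        }
        output.WriteByte((byte)v);                       // final byte, continuation bit clear
    }

    public static int ReadVInt32(Stream input)
    {
        int value = 0, shift = 0, b;
        do
        {
            b = input.ReadByte();
            if (b < 0) throw new EndOfStreamException();
            value |= (b & 0x7F) << shift;                // accumulate 7 bits per byte
            shift += 7;
        } while ((b & 0x80) != 0);
        return value;
    }

    public static void Main()
    {
        var ms = new MemoryStream();
        WriteVInt32(ms, 300);                            // encodes as 0xAC 0x02
        ms.Position = 0;
        Console.WriteLine(ReadVInt32(ms));               // prints 300
    }
}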

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
index 28f94ad..b1f722e 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingStoredFieldsWriter.cs
@@ -102,8 +102,8 @@ namespace Lucene.Net.Codecs.Compressing
                 indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
                 indexStream = null;
 
-                fieldsStream.WriteVInt(chunkSize);
-                fieldsStream.WriteVInt(PackedInts.VERSION_CURRENT);
+                fieldsStream.WriteVInt32(chunkSize);
+                fieldsStream.WriteVInt32(PackedInts.VERSION_CURRENT);
 
                 success = true;
             }
@@ -154,12 +154,15 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        private static void SaveInts(int[] values, int length, DataOutput @out) // LUCENENET TODO: Rename SaveInt32s ?
+        /// <summary>
+        /// NOTE: This was saveInts() in Lucene
+        /// </summary>
+        private static void SaveInt32s(int[] values, int length, DataOutput @out)
         {
             Debug.Assert(length > 0);
             if (length == 1)
             {
-                @out.WriteVInt(values[0]);
+                @out.WriteVInt32(values[0]);
             }
             else
             {
@@ -174,8 +177,8 @@ namespace Lucene.Net.Codecs.Compressing
                 }
                 if (allEqual)
                 {
-                    @out.WriteVInt(0);
-                    @out.WriteVInt(values[0]);
+                    @out.WriteVInt32(0);
+                    @out.WriteVInt32(values[0]);
                 }
                 else
                 {
@@ -185,7 +188,7 @@ namespace Lucene.Net.Codecs.Compressing
                         max |= (uint)values[i];
                     }
                     int bitsRequired = PackedInts.BitsRequired(max);
-                    @out.WriteVInt(bitsRequired);
+                    @out.WriteVInt32(bitsRequired);
                     PackedInts.Writer w = PackedInts.GetWriterNoHeader(@out, PackedInts.Format.PACKED, length, bitsRequired, 1);
                     for (int i = 0; i < length; ++i)
                     {
@@ -199,14 +202,14 @@ namespace Lucene.Net.Codecs.Compressing
         private void WriteHeader(int docBase, int numBufferedDocs, int[] numStoredFields, int[] lengths)
         {
             // save docBase and numBufferedDocs
-            fieldsStream.WriteVInt(docBase);
-            fieldsStream.WriteVInt(numBufferedDocs);
+            fieldsStream.WriteVInt32(docBase);
+            fieldsStream.WriteVInt32(numBufferedDocs);
 
             // save numStoredFields
-            SaveInts(numStoredFields, numBufferedDocs, fieldsStream);
+            SaveInt32s(numStoredFields, numBufferedDocs, fieldsStream);
 
             // save lengths
-            SaveInts(lengths, numBufferedDocs, fieldsStream);
+            SaveInt32s(lengths, numBufferedDocs, fieldsStream);
         }
 
         private bool TriggerFlush()
@@ -333,11 +336,11 @@ namespace Lucene.Net.Codecs.Compressing
             }
 
             long infoAndBits = (((long)info.Number) << TYPE_BITS) | bits;
-            bufferedDocs.WriteVLong(infoAndBits);
+            bufferedDocs.WriteVInt64(infoAndBits);
 
             if (bytes != null)
             {
-                bufferedDocs.WriteVInt(bytes.Length);
+                bufferedDocs.WriteVInt32(bytes.Length);
                 bufferedDocs.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length);
             }
             else if (@string != null)
@@ -381,19 +384,19 @@ namespace Lucene.Net.Codecs.Compressing
                 {
                     if (number is sbyte || number is short || number is int)
                     {
-                        bufferedDocs.WriteInt((int)number);
+                        bufferedDocs.WriteInt32((int)number);
                     }
                     else if (number is long)
                     {
-                        bufferedDocs.WriteLong((long)number);
+                        bufferedDocs.WriteInt64((long)number);
                     }
                     else if (number is float)
                     {
-                        bufferedDocs.WriteInt(Number.FloatToIntBits((float)number));
+                        bufferedDocs.WriteInt32(Number.SingleToInt32Bits((float)number));
                     }
                     else if (number is double)
                     {
-                        bufferedDocs.WriteLong(BitConverter.DoubleToInt64Bits((double)number));
+                        bufferedDocs.WriteInt64(BitConverter.DoubleToInt64Bits((double)number));
                     }
                     else
                     {
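
SaveInt32s above picks one of three encodings for the per-document counts written by WriteHeader: a single value when the chunk holds one document, a (0, value) pair when every document has the same value, and a bit width followed by packed values otherwise. A rough standalone sketch of that branching, assuming BinaryWriter's fixed 4-byte writes in place of WriteVInt32 and PackedInts, so the byte layout differs from the codec's:

using System;
using System.IO;
using System.Linq;

// Rough sketch of the SaveInt32s branching; fixed-width writes stand in for
// the codec's WriteVInt32 / PackedInts output.
internal static class SaveInt32sSketch
{
    public static void SaveInt32s(int[] values, int length, BinaryWriter output)
    {
        if (length == 1)
        {
            output.Write(values[0]);                                // single document in the chunk
        }
        else if (values.Take(length).Distinct().Count() == 1)
        {
            output.Write(0);                                        // marker: all values identical
            output.Write(values[0]);
        }
        else
        {
            output.Write(BitsRequired(values.Take(length).Max()));  // bits per packed value
            for (int i = 0; i < length; i++)
            {
                output.Write(values[i]);                            // stand-in for the packed block
            }
        }
    }

    private static int BitsRequired(int max)
    {
        int bits = 1;
        while ((max >>= 1) != 0) bits++;                            // smallest width that fits max
        return bits;
    }

    public static void Main()
    {
        using (var output = new BinaryWriter(new MemoryStream()))
        {
            SaveInt32s(new[] { 3, 3, 3 }, 3, output);               // "all identical" branch
            SaveInt32s(new[] { 1, 7, 42 }, 3, output);              // packed branch
            Console.WriteLine(output.BaseStream.Length);            // bytes written by the sketch
        }
    }
}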

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
index 1abbb55..af107da 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsReader.cs
@@ -83,7 +83,7 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM)
                 {
-                    indexStream.ReadVLong(); // the end of the data file
+                    indexStream.ReadVInt64(); // the end of the data file
                     CodecUtil.CheckFooter(indexStream);
                 }
                 else
@@ -106,8 +106,8 @@ namespace Lucene.Net.Codecs.Compressing
                 }
                 Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.FilePointer);
 
-                packedIntsVersion = vectorsStream.ReadVInt();
-                chunkSize = vectorsStream.ReadVInt();
+                packedIntsVersion = vectorsStream.ReadVInt32();
+                chunkSize = vectorsStream.ReadVInt32();
                 decompressor = compressionMode.NewDecompressor();
                 this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, CompressingTermVectorsWriter.BLOCK_SIZE, 0);
 
@@ -138,7 +138,10 @@ namespace Lucene.Net.Codecs.Compressing
             }
         }
 
-        internal int PackedIntsVersion
+        /// <summary>
+        /// NOTE: This was getPackedIntsVersion() in Lucene
+        /// </summary>
+        internal int PackedInt32sVersion
         {
             get
             {
@@ -206,8 +209,8 @@ namespace Lucene.Net.Codecs.Compressing
             // decode
             // - docBase: first doc ID of the chunk
             // - chunkDocs: number of docs of the chunk
-            int docBase = vectorsStream.ReadVInt();
-            int chunkDocs = vectorsStream.ReadVInt();
+            int docBase = vectorsStream.ReadVInt32();
+            int chunkDocs = vectorsStream.ReadVInt32();
             if (doc < docBase || doc >= docBase + chunkDocs || docBase + chunkDocs > numDocs)
             {
                 throw new CorruptIndexException("docBase=" + docBase + ",chunkDocs=" + chunkDocs + ",doc=" + doc + " (resource=" + vectorsStream + ")");
@@ -219,7 +222,7 @@ namespace Lucene.Net.Codecs.Compressing
             if (chunkDocs == 1)
             {
                 skip = 0;
-                numFields = totalFields = vectorsStream.ReadVInt();
+                numFields = totalFields = vectorsStream.ReadVInt32();
             }
             else
             {
@@ -254,7 +257,7 @@ namespace Lucene.Net.Codecs.Compressing
                 int totalDistinctFields = (int)((uint)token >> 5);
                 if (totalDistinctFields == 0x07)
                 {
-                    totalDistinctFields += vectorsStream.ReadVInt();
+                    totalDistinctFields += vectorsStream.ReadVInt32();
                 }
                 ++totalDistinctFields;
                 PackedInts.IReaderIterator it = PackedInts.GetReaderIteratorNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalDistinctFields, bitsPerFieldNum, 1);
@@ -271,7 +274,7 @@ namespace Lucene.Net.Codecs.Compressing
             {
                 int bitsPerOff = PackedInts.BitsRequired(fieldNums.Length - 1);
                 PackedInts.Reader allFieldNumOffs = PackedInts.GetReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalFields, bitsPerOff);
-                switch (vectorsStream.ReadVInt())
+                switch (vectorsStream.ReadVInt32())
                 {
                     case 0:
                         PackedInts.Reader fieldFlags = PackedInts.GetReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, fieldNums.Length, CompressingTermVectorsWriter.FLAGS_BITS);
@@ -303,7 +306,7 @@ namespace Lucene.Net.Codecs.Compressing
             PackedInts.Reader numTerms;
             int totalTerms;
             {
-                int bitsRequired = vectorsStream.ReadVInt();
+                int bitsRequired = vectorsStream.ReadVInt32();
                 numTerms = PackedInts.GetReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalFields, bitsRequired);
                 int sum = 0;
                 for (int i = 0; i < totalFields; ++i)
@@ -338,7 +341,7 @@ namespace Lucene.Net.Codecs.Compressing
                         LongsRef next = reader.Next(termCount - j);
                         for (int k = 0; k < next.Length; ++k)
                         {
-                            fieldPrefixLengths[j++] = (int)next.Longs[next.Offset + k];
+                            fieldPrefixLengths[j++] = (int)next.Int64s[next.Offset + k];
                         }
                     }
                 }
@@ -364,7 +367,7 @@ namespace Lucene.Net.Codecs.Compressing
                         LongsRef next = reader.Next(termCount - j);
                         for (int k = 0; k < next.Length; ++k)
                         {
-                            fieldSuffixLengths[j++] = (int)next.Longs[next.Offset + k];
+                            fieldSuffixLengths[j++] = (int)next.Int64s[next.Offset + k];
                         }
                     }
                     fieldLengths[i] = Sum(suffixLengths[i]);
@@ -389,7 +392,7 @@ namespace Lucene.Net.Codecs.Compressing
                     LongsRef next = reader.Next(totalTerms - i);
                     for (int k = 0; k < next.Length; ++k)
                     {
-                        termFreqs[i++] = 1 + (int)next.Longs[next.Offset + k];
+                        termFreqs[i++] = 1 + (int)next.Int64s[next.Offset + k];
                     }
                 }
             }
@@ -436,7 +439,7 @@ namespace Lucene.Net.Codecs.Compressing
                 float[] charsPerTerm = new float[fieldNums.Length];
                 for (int i = 0; i < charsPerTerm.Length; ++i)
                 {
-                    charsPerTerm[i] = Number.IntBitsToFloat(vectorsStream.ReadInt());
+                    charsPerTerm[i] = Number.Int32BitsToSingle(vectorsStream.ReadInt32());
                 }
                 startOffsets = ReadPositions(skip, numFields, flags, numTerms, termFreqs, CompressingTermVectorsWriter.OFFSETS, totalOffsets, positionIndex);
                 lengths = ReadPositions(skip, numFields, flags, numTerms, termFreqs, CompressingTermVectorsWriter.OFFSETS, totalOffsets, positionIndex);
@@ -676,7 +679,7 @@ namespace Lucene.Net.Codecs.Compressing
                         LongsRef nextPositions = reader.Next(totalFreq - j);
                         for (int k = 0; k < nextPositions.Length; ++k)
                         {
-                            fieldPositions[j++] = (int)nextPositions.Longs[nextPositions.Offset + k];
+                            fieldPositions[j++] = (int)nextPositions.Int64s[nextPositions.Offset + k];
                         }
                     }
                 }
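
The charsPerTerm statistics read above are stored as raw 32-bit patterns and turned back into floats, which is what the renamed Number.SingleToInt32Bits / Number.Int32BitsToSingle pair is used for here. A small standalone equivalent using BitConverter (illustrative only; the sketch's names are not the port's API):

using System;

// Illustrative bit-level float <-> int round trip, mirroring what the renamed
// SingleToInt32Bits / Int32BitsToSingle helpers are used for with charsPerTerm.
internal static class FloatBitsSketch
{
    public static int SingleToInt32Bits(float value)
    {
        return BitConverter.ToInt32(BitConverter.GetBytes(value), 0);
    }

    public static float Int32BitsToSingle(int bits)
    {
        return BitConverter.ToSingle(BitConverter.GetBytes(bits), 0);
    }

    public static void Main()
    {
        float charsPerTerm = 6.25f;
        int raw = SingleToInt32Bits(charsPerTerm);     // the pattern WriteInt32 persists
        Console.WriteLine(Int32BitsToSingle(raw));     // reads back as 6.25
    }
}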

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsWriter.cs
index 2f5055a..275ca91 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsWriter.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressingTermVectorsWriter.cs
@@ -268,8 +268,8 @@ namespace Lucene.Net.Codecs.Compressing
                 indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
                 indexStream = null;
 
-                vectorsStream.WriteVInt(PackedInts.VERSION_CURRENT);
-                vectorsStream.WriteVInt(chunkSize);
+                vectorsStream.WriteVInt32(PackedInts.VERSION_CURRENT);
+                vectorsStream.WriteVInt32(chunkSize);
                 writer = new BlockPackedWriter(vectorsStream, BLOCK_SIZE);
 
                 positionsBuf = new int[1024];
@@ -380,8 +380,8 @@ namespace Lucene.Net.Codecs.Compressing
             indexWriter.WriteIndex(chunkDocs, vectorsStream.FilePointer);
 
             int docBase = numDocs - chunkDocs;
-            vectorsStream.WriteVInt(docBase);
-            vectorsStream.WriteVInt(chunkDocs);
+            vectorsStream.WriteVInt32(docBase);
+            vectorsStream.WriteVInt32(chunkDocs);
 
             // total number of fields of the chunk
             int totalFields = FlushNumFields(chunkDocs);
@@ -423,7 +423,7 @@ namespace Lucene.Net.Codecs.Compressing
             if (chunkDocs == 1)
             {
                 int numFields = pendingDocs.First.Value.numFields;
-                vectorsStream.WriteVInt(numFields);
+                vectorsStream.WriteVInt32(numFields);
                 return numFields;
             }
             else
@@ -460,7 +460,7 @@ namespace Lucene.Net.Codecs.Compressing
             vectorsStream.WriteByte((byte)(sbyte)token);
             if (numDistinctFields - 1 >= 0x07)
             {
-                vectorsStream.WriteVInt(numDistinctFields - 1 - 0x07);
+                vectorsStream.WriteVInt32(numDistinctFields - 1 - 0x07);
             }
             PackedInts.Writer writer = PackedInts.GetWriterNoHeader(vectorsStream, PackedInts.Format.PACKED, fieldNums.Count, bitsRequired, 1);
             foreach (int fieldNum in fieldNums)
@@ -524,7 +524,7 @@ namespace Lucene.Net.Codecs.Compressing
             if (nonChangingFlags)
             {
                 // write one flag per field num
-                vectorsStream.WriteVInt(0);
+                vectorsStream.WriteVInt32(0);
                 PackedInts.Writer writer = PackedInts.GetWriterNoHeader(vectorsStream, PackedInts.Format.PACKED, fieldFlags.Length, FLAGS_BITS, 1);
                 foreach (int flags in fieldFlags)
                 {
@@ -537,7 +537,7 @@ namespace Lucene.Net.Codecs.Compressing
             else
             {
                 // write one flag for every field instance
-                vectorsStream.WriteVInt(1);
+                vectorsStream.WriteVInt32(1);
                 PackedInts.Writer writer = PackedInts.GetWriterNoHeader(vectorsStream, PackedInts.Format.PACKED, totalFields, FLAGS_BITS, 1);
                 foreach (DocData dd in pendingDocs)
                 {
@@ -562,7 +562,7 @@ namespace Lucene.Net.Codecs.Compressing
                 }
             }
             int bitsRequired = PackedInts.BitsRequired(maxNumTerms);
-            vectorsStream.WriteVInt(bitsRequired);
+            vectorsStream.WriteVInt32(bitsRequired);
             PackedInts.Writer writer = PackedInts.GetWriterNoHeader(vectorsStream, PackedInts.Format.PACKED, totalFields, bitsRequired, 1);
             foreach (DocData dd in pendingDocs)
             {
@@ -695,7 +695,7 @@ namespace Lucene.Net.Codecs.Compressing
             // start offsets
             for (int i = 0; i < fieldNums.Length; ++i)
             {
-                vectorsStream.WriteInt(Number.FloatToIntBits(charsPerTerm[i]));
+                vectorsStream.WriteInt32(Number.SingleToInt32Bits(charsPerTerm[i]));
             }
 
             writer.Reset(vectorsStream);
@@ -813,11 +813,11 @@ namespace Lucene.Net.Codecs.Compressing
                     }
                     for (int i = 0; i < numProx; ++i)
                     {
-                        int code = positions.ReadVInt();
+                        int code = positions.ReadVInt32();
                         if ((code & 1) != 0)
                         {
                             // this position has a payload
-                            int payloadLength = positions.ReadVInt();
+                            int payloadLength = positions.ReadVInt32();
                             payloadLengthsBuf[payStart + i] = payloadLength;
                             payloadBytes.CopyBytes(positions, payloadLength);
                         }
@@ -833,7 +833,7 @@ namespace Lucene.Net.Codecs.Compressing
                 {
                     for (int i = 0; i < numProx; ++i)
                     {
-                        position += ((int)((uint)positions.ReadVInt() >> 1));
+                        position += ((int)((uint)positions.ReadVInt32() >> 1));
                         positionsBuf[posStart + i] = position;
                     }
                 }
@@ -851,8 +851,8 @@ namespace Lucene.Net.Codecs.Compressing
                 int lastOffset = 0, startOffset, endOffset;
                 for (int i = 0; i < numProx; ++i)
                 {
-                    startOffset = lastOffset + offsets.ReadVInt();
-                    endOffset = startOffset + offsets.ReadVInt();
+                    startOffset = lastOffset + offsets.ReadVInt32();
+                    endOffset = startOffset + offsets.ReadVInt32();
                     lastOffset = endOffset;
                     startOffsetsBuf[offStart + i] = startOffset;
                     lengthsBuf[offStart + i] = endOffset - startOffset;
@@ -884,7 +884,7 @@ namespace Lucene.Net.Codecs.Compressing
                 int maxDoc = reader.MaxDoc;
                 IBits liveDocs = reader.LiveDocs;
 
-                if (matchingVectorsReader == null || matchingVectorsReader.Version != VERSION_CURRENT || matchingVectorsReader.CompressionMode != compressionMode || matchingVectorsReader.ChunkSize != chunkSize || matchingVectorsReader.PackedIntsVersion != PackedInts.VERSION_CURRENT)
+                if (matchingVectorsReader == null || matchingVectorsReader.Version != VERSION_CURRENT || matchingVectorsReader.CompressionMode != compressionMode || matchingVectorsReader.ChunkSize != chunkSize || matchingVectorsReader.PackedInt32sVersion != PackedInts.VERSION_CURRENT)
                 {
                     // naive merge...
                     for (int i = NextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; i = NextLiveDoc(i + 1, liveDocs, maxDoc))
@@ -913,16 +913,16 @@ namespace Lucene.Net.Codecs.Compressing
                         }
                         if ((pendingDocs.Count == 0) && (i == 0 || index.GetStartPointer(i - 1) < startPointer)) // start of a chunk
                         {
-                            int docBase = vectorsStream.ReadVInt();
-                            int chunkDocs = vectorsStream.ReadVInt();
+                            int docBase = vectorsStream.ReadVInt32();
+                            int chunkDocs = vectorsStream.ReadVInt32();
                             Debug.Assert(docBase + chunkDocs <= matchingSegmentReader.MaxDoc);
                             if (docBase + chunkDocs < matchingSegmentReader.MaxDoc && NextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs)
                             {
                                 long chunkEnd = index.GetStartPointer(docBase + chunkDocs);
                                 long chunkLength = chunkEnd - vectorsStream.FilePointer;
                                 indexWriter.WriteIndex(chunkDocs, this.vectorsStream.FilePointer);
-                                this.vectorsStream.WriteVInt(docCount);
-                                this.vectorsStream.WriteVInt(chunkDocs);
+                                this.vectorsStream.WriteVInt32(docCount);
+                                this.vectorsStream.WriteVInt32(chunkDocs);
                                 this.vectorsStream.CopyBytes(vectorsStream, chunkLength);
                                 docCount += chunkDocs;
                                 this.numDocs += chunkDocs;
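
The merge path above re-reads per-term proximity data where bit 0 of each position code flags a payload (whose length then follows) and the upper bits carry the position delta. A simplified, combined sketch of that decoding against a plain int array -- the real writer reads payload bytes and offsets from separate streams and splits these steps across branches:

using System;
using System.Collections.Generic;

// Simplified sketch of the position decoding above: bit 0 of each code flags a payload
// (its length follows the code in this sketch), and the remaining bits hold the delta.
internal static class ProxDecodeSketch
{
    public static List<int> DecodePositions(int[] codes, List<int> payloadLengths)
    {
        var positions = new List<int>();
        int position = 0;
        for (int i = 0; i < codes.Length; )
        {
            int code = codes[i++];
            position += (int)((uint)code >> 1);                    // delta lives in the upper bits
            positions.Add(position);
            payloadLengths.Add((code & 1) != 0 ? codes[i++] : 0);  // bit 0: payload present
        }
        return positions;
    }

    public static void Main()
    {
        // deltas 2 and 3; the second position carries a 4-byte payload
        int[] codes = { 2 << 1, (3 << 1) | 1, 4 };
        var payloadLengths = new List<int>();
        var positions = DecodePositions(codes, payloadLengths);
        Console.WriteLine(string.Join(",", positions));            // 2,5
        Console.WriteLine(string.Join(",", payloadLengths));       // 0,4
    }
}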

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/CompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net.Core/Codecs/Compressing/CompressionMode.cs
index 55de4db..ce0857c 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/CompressionMode.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/CompressionMode.cs
@@ -217,7 +217,7 @@ namespace Lucene.Net.Codecs.Compressing
                     return;
                 }
 
-                byte[] compressedBytes = new byte[input.ReadVInt()];
+                byte[] compressedBytes = new byte[input.ReadVInt32()];
                 input.ReadBytes(compressedBytes, 0, compressedBytes.Length);
                 byte[] decompressedBytes = null;
 
@@ -272,12 +272,12 @@ namespace Lucene.Net.Codecs.Compressing
                 if (resultArray.Length == 0)
                 {
                     Debug.Assert(len == 0, len.ToString());
-                    output.WriteVInt(0);
+                    output.WriteVInt32(0);
                     return;
                 }
                 else
                 {
-                    output.WriteVInt(resultArray.Length);
+                    output.WriteVInt32(resultArray.Length);
                     output.WriteBytes(resultArray, resultArray.Length);
                 }
             }
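
The compressor above length-prefixes each compressed block (WriteVInt32 of the byte count, then the bytes), and the decompressor reads that prefix back before pulling exactly that many bytes. A rough round-trip sketch of the same framing, with DeflateStream and fixed 4-byte lengths standing in for the codec's compressor and VInt32s:

using System;
using System.IO;
using System.IO.Compression;

// Rough sketch of length-prefixed block framing; DeflateStream and BinaryWriter's
// 4-byte length stand in for the codec's Compressor and WriteVInt32/ReadVInt32.
internal static class LengthPrefixedBlockSketch
{
    public static void WriteBlock(BinaryWriter output, byte[] raw)
    {
        var buffer = new MemoryStream();
        using (var deflate = new DeflateStream(buffer, CompressionLevel.Fastest, leaveOpen: true))
        {
            deflate.Write(raw, 0, raw.Length);               // compress into the buffer
        }
        byte[] compressed = buffer.ToArray();
        output.Write(compressed.Length);                     // length prefix first
        output.Write(compressed);                            // then the compressed payload
    }

    public static byte[] ReadBlock(BinaryReader input)
    {
        int length = input.ReadInt32();                      // read the prefix
        byte[] compressed = input.ReadBytes(length);         // then exactly that many bytes
        using (var inflate = new DeflateStream(new MemoryStream(compressed), System.IO.Compression.CompressionMode.Decompress))
        using (var result = new MemoryStream())
        {
            inflate.CopyTo(result);
            return result.ToArray();
        }
    }

    public static void Main()
    {
        var stream = new MemoryStream();
        WriteBlock(new BinaryWriter(stream), new byte[] { 1, 2, 3, 4, 5 });
        stream.Position = 0;
        Console.WriteLine(ReadBlock(new BinaryReader(stream)).Length); // 5
    }
}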

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Compressing/LZ4.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Compressing/LZ4.cs b/src/Lucene.Net.Core/Codecs/Compressing/LZ4.cs
index 94921d8..b4316bb 100644
--- a/src/Lucene.Net.Core/Codecs/Compressing/LZ4.cs
+++ b/src/Lucene.Net.Core/Codecs/Compressing/LZ4.cs
@@ -55,15 +55,21 @@ namespace Lucene.Net.Codecs.Compressing
             return Hash(i, HASH_LOG_HC);
         }
 
-        private static int ReadInt(byte[] buf, int i)
+        /// <summary>
+        /// NOTE: This was readInt() in Lucene
+        /// </summary>
+        private static int ReadInt32(byte[] buf, int i)
         {
             return ((((sbyte)buf[i]) & 0xFF) << 24) | ((((sbyte)buf[i + 1]) & 0xFF) << 16) | ((((sbyte)buf[i + 2]) & 0xFF) << 8) |
                 (((sbyte)buf[i + 3]) & 0xFF);
         }
 
-        private static bool ReadIntEquals(byte[] buf, int i, int j)
+        /// <summary>
+        /// NOTE: This was readIntEquals() in Lucene
+        /// </summary>
+        private static bool ReadInt32Equals(byte[] buf, int i, int j)
         {
-            return ReadInt(buf, i) == ReadInt(buf, j);
+            return ReadInt32(buf, i) == ReadInt32(buf, j);
         }
 
         private static int CommonBytes(byte[] b, int o1, int o2, int limit)
@@ -264,12 +270,12 @@ namespace Lucene.Net.Codecs.Compressing
                         {
                             goto mainBreak;
                         }
-                        int v = ReadInt(bytes, off);
+                        int v = ReadInt32(bytes, off);
                         int h = Hash(v, hashLog);
                         @ref = @base + (int)hashTable.Get(h);
                         Debug.Assert(PackedInts.BitsRequired(off - @base) <= hashTable.BitsPerValue);
                         hashTable.Set(h, off - @base);
-                        if (off - @ref < MAX_DISTANCE && ReadInt(bytes, @ref) == v)
+                        if (off - @ref < MAX_DISTANCE && ReadInt32(bytes, @ref) == v)
                         {
                             break;
                         }
@@ -342,7 +348,7 @@ namespace Lucene.Net.Codecs.Compressing
 
             private int HashPointer(byte[] bytes, int off)
             {
-                int v = ReadInt(bytes, off);
+                int v = ReadInt32(bytes, off);
                 int h = HashHC(v);
                 return hashTable[h];
             }
@@ -354,7 +360,7 @@ namespace Lucene.Net.Codecs.Compressing
 
             private void AddHash(byte[] bytes, int off)
             {
-                int v = ReadInt(bytes, off);
+                int v = ReadInt32(bytes, off);
                 int h = HashHC(v);
                 int delta = off - hashTable[h];
                 Debug.Assert(delta > 0, delta.ToString());
@@ -387,7 +393,7 @@ namespace Lucene.Net.Codecs.Compressing
 
                 if (@ref >= off - 4 && @ref <= off && @ref >= @base) // potential repetition
                 {
-                    if (ReadIntEquals(buf, @ref, off)) // confirmed
+                    if (ReadInt32Equals(buf, @ref, off)) // confirmed
                     {
                         delta = off - @ref;
                         repl = match.len = MIN_MATCH + CommonBytes(buf, @ref + MIN_MATCH, off + MIN_MATCH, matchLimit);
@@ -402,7 +408,7 @@ namespace Lucene.Net.Codecs.Compressing
                     {
                         break;
                     }
-                    if (buf[@ref + match.len] == buf[off + match.len] && ReadIntEquals(buf, @ref, off))
+                    if (buf[@ref + match.len] == buf[off + match.len] && ReadInt32Equals(buf, @ref, off))
                     {
                         int matchLen = MIN_MATCH + CommonBytes(buf, @ref + MIN_MATCH, off + MIN_MATCH, matchLimit);
                         if (matchLen > match.len)
@@ -426,7 +432,7 @@ namespace Lucene.Net.Codecs.Compressing
                     do
                     {
                         chainTable[ptr & MASK] = (short)delta;
-                        hashTable[HashHC(ReadInt(buf, ptr))] = ptr;
+                        hashTable[HashHC(ReadInt32(buf, ptr))] = ptr;
                         ++ptr;
                     } while (ptr < end);
                     nextToUpdate = end;
@@ -449,7 +455,7 @@ namespace Lucene.Net.Codecs.Compressing
                     {
                         break;
                     }
-                    if (buf[@ref - delta + match.len] == buf[startLimit + match.len] && ReadIntEquals(buf, @ref, off))
+                    if (buf[@ref - delta + match.len] == buf[startLimit + match.len] && ReadInt32Equals(buf, @ref, off))
                     {
                         int matchLenForward = MIN_MATCH + CommonBytes(buf, @ref + MIN_MATCH, off + MIN_MATCH, matchLimit);
                         int matchLenBackward = CommonBytesBackward(buf, @ref, off, @base, startLimit);
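
ReadInt32 above packs four bytes into a big-endian int so the LZ4 match finder can hash and compare 4-byte windows with single integer operations; ReadInt32Equals is just that comparison. A standalone version with a tiny usage check (sketch only, outside the LZ4 class):

using System;

// Standalone copy of the big-endian 4-byte read used by the LZ4 match finder,
// plus the 4-byte window comparison built on top of it.
internal static class Lz4ReadInt32Sketch
{
    public static int ReadInt32(byte[] buf, int i)
    {
        return (buf[i] << 24) | (buf[i + 1] << 16) | (buf[i + 2] << 8) | buf[i + 3];
    }

    public static bool ReadInt32Equals(byte[] buf, int i, int j)
    {
        return ReadInt32(buf, i) == ReadInt32(buf, j);      // one int compare instead of 4 byte compares
    }

    public static void Main()
    {
        byte[] data = { 0xCA, 0xFE, 0xBA, 0xBE, 0xCA, 0xFE, 0xBA, 0xBE };
        Console.WriteLine(ReadInt32(data, 0).ToString("X8")); // CAFEBABE
        Console.WriteLine(ReadInt32Equals(data, 0, 4));       // True: matching 4-byte window
    }
}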

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f7432173/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFieldInfosReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFieldInfosReader.cs b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFieldInfosReader.cs
index 7d3da96..1b06a5c 100644
--- a/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFieldInfosReader.cs
+++ b/src/Lucene.Net.Core/Codecs/Lucene3x/Lucene3xFieldInfosReader.cs
@@ -66,7 +66,7 @@ namespace Lucene.Net.Codecs.Lucene3x
             bool success = false;
             try
             {
-                int format = input.ReadVInt();
+                int format = input.ReadVInt32();
 
                 if (format > FORMAT_MINIMUM)
                 {
@@ -77,7 +77,7 @@ namespace Lucene.Net.Codecs.Lucene3x
                     throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                 }
 
-                int size = input.ReadVInt(); //read in the size
+                int size = input.ReadVInt32(); //read in the size
                 FieldInfo[] infos = new FieldInfo[size];
 
                 for (int i = 0; i < size; i++)