You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by sy...@apache.org on 2016/10/02 14:36:01 UTC

[43/50] [abbrv] lucenenet git commit: Fixed most compiler warnings in Analysis (a few lingering issues).

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseAnalyzer.cs
index cb308de..588b74f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseAnalyzer.cs
@@ -72,7 +72,9 @@ namespace Lucene.Net.Analysis.Pt
                 {
                     DEFAULT_STOP_SET = WordlistLoader.GetSnowballWordSet(
                         IOUtils.GetDecodingReader(typeof(SnowballFilter), typeof(SnowballFilter).Namespace + "." + DEFAULT_STOPWORD_FILE, Encoding.UTF8),
+#pragma warning disable 612, 618
                         LuceneVersion.LUCENE_CURRENT);
+#pragma warning restore 612, 618
                 }
                 catch (IOException)
                 {
@@ -136,7 +138,9 @@ namespace Lucene.Net.Analysis.Pt
             {
                 result = new SetKeywordMarkerFilter(result, stemExclusionSet);
             }
+#pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_36))
+#pragma warning restore 612, 618
             {
                 result = new PortugueseLightStemFilter(result);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
index 35a2740..17a4183 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs
@@ -137,7 +137,11 @@ namespace Lucene.Net.Analysis.Pt
                         throw new Exception("useless exception '" + exceptions[i] + "' does not end with '" + suffix + "'");
                     }
                 }
-                this.exceptions = new CharArraySet(LuceneVersion.LUCENE_CURRENT, Arrays.AsList(exceptions), false);
+                this.exceptions = new CharArraySet(
+#pragma warning disable 612, 618
+                    LuceneVersion.LUCENE_CURRENT,
+#pragma warning restore 612, 618
+                    Arrays.AsList(exceptions), false);
             }
 
             public override bool Matches(char[] s, int len)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilter.cs
index 5a78e2a..ffce14f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilter.cs
@@ -179,9 +179,12 @@ namespace Lucene.Net.Analysis.Reverse
         ///        buffer should be reversed </param>
         public static void Reverse(LuceneVersion matchVersion, char[] buffer, int start, int len)
         {
+#pragma warning disable 612, 618
             if (!matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
+
             {
                 ReverseUnicode3(buffer, start, len);
+#pragma warning restore 612, 618
                 return;
             }
             /* modified version of Apache Harmony AbstractStringBuilder reverse0() */

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
index 5040913..bd92ba4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianAnalyzer.cs
@@ -69,7 +69,9 @@ namespace Lucene.Net.Analysis.Ru
                 {
                     DEFAULT_STOP_SET = WordlistLoader.GetSnowballWordSet(
                         IOUtils.GetDecodingReader(typeof(SnowballFilter), typeof(SnowballFilter).Namespace + "." + DEFAULT_STOPWORD_FILE, Encoding.UTF8),
+#pragma warning disable 612, 618
                         LuceneVersion.LUCENE_CURRENT);
+#pragma warning restore 612, 618
                 }
                 catch (IOException ex)
                 {
@@ -95,7 +97,10 @@ namespace Lucene.Net.Analysis.Ru
         }
 
         public RussianAnalyzer(LuceneVersion matchVersion)
-            : this(matchVersion, matchVersion.OnOrAfter(LuceneVersion.LUCENE_31) ? DefaultSetHolder.DEFAULT_STOP_SET : DefaultSetHolder.DEFAULT_STOP_SET_30)
+#pragma warning disable 612, 618
+            : this(matchVersion, matchVersion.OnOrAfter(LuceneVersion.LUCENE_31) ? 
+                  DefaultSetHolder.DEFAULT_STOP_SET : DefaultSetHolder.DEFAULT_STOP_SET_30)
+#pragma warning restore 612, 618
         {
         }
 
@@ -137,7 +142,9 @@ namespace Lucene.Net.Analysis.Ru
         ///         provided, and <seealso cref="SnowballFilter"/> </returns>
         public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
         {
+#pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
+#pragma warning restore 612, 618
             {
                 Tokenizer source = new StandardTokenizer(matchVersion, reader);
                 TokenStream result = new StandardFilter(matchVersion, source);
@@ -152,7 +159,9 @@ namespace Lucene.Net.Analysis.Ru
             }
             else
             {
+#pragma warning disable 612, 618
                 Tokenizer source = new RussianLetterTokenizer(matchVersion, reader);
+#pragma warning restore 612, 618
                 TokenStream result = new LowerCaseFilter(matchVersion, source);
                 result = new StopFilter(matchVersion, result, stopwords);
                 if (stemExclusionSet.Count > 0)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
index b697e1e..7fd3bc2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs
@@ -357,6 +357,7 @@ namespace Lucene.Net.Analysis.Standard
         /// the number of characters up to the start of the matched text </summary>
         private int yyChar;
 
+#pragma warning disable 169, 414
         /// <summary>
         /// the number of characters from the last newline up to the start of the 
         /// matched text
@@ -376,9 +377,12 @@ namespace Lucene.Net.Analysis.Standard
         /// denotes if the user-EOF-code has already been executed </summary>
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
 
         public static readonly int ALPHANUM = StandardTokenizer.ALPHANUM;
+#pragma warning disable 612, 618
         public static readonly int APOSTROPHE = StandardTokenizer.APOSTROPHE;
         public static readonly int ACRONYM = StandardTokenizer.ACRONYM;
         public static readonly int COMPANY = StandardTokenizer.COMPANY;
@@ -387,6 +391,7 @@ namespace Lucene.Net.Analysis.Standard
         public static readonly int NUM = StandardTokenizer.NUM;
         public static readonly int CJ = StandardTokenizer.CJ;
         public static readonly int ACRONYM_DEP = StandardTokenizer.ACRONYM_DEP;
+#pragma warning restore 612, 618
 
         public static readonly string[] TOKEN_TYPES = StandardTokenizer.TOKEN_TYPES;
 
@@ -746,7 +751,7 @@ namespace Lucene.Net.Analysis.Standard
                         { // Break so we don't hit fall-through warning:
                             break; // ignore
                         }
-                        goto case 11;
+                        //goto case 11; // unreachable
                     case 11:
                         break;
                     case 2:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilter.cs
index a8428bf..35d3467 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilter.cs
@@ -44,7 +44,9 @@ namespace Lucene.Net.Analysis.Standard
 
         public override sealed bool IncrementToken()
         {
+#pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
+#pragma warning restore 612, 618
             {
                 return input.IncrementToken(); // TODO: add some niceties for the new grammar
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizer.cs
index f67ff00..ad3ad7a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizer.cs
@@ -142,6 +142,7 @@ namespace Lucene.Net.Analysis.Standard
 
         private void Init(Version matchVersion)
         {
+#pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(Version.LUCENE_47))
             {
                 this.scanner = new StandardTokenizerImpl(input);
@@ -158,6 +159,7 @@ namespace Lucene.Net.Analysis.Standard
             {
                 this.scanner = new StandardTokenizerImpl31(input);
             }
+#pragma warning restore 612, 618
             else
             {
                 this.scanner = new ClassicTokenizerImpl(input);
@@ -206,9 +208,11 @@ namespace Lucene.Net.Analysis.Standard
                     // This 'if' should be removed in the next release. For now, it converts
                     // invalid acronyms to HOST. When removed, only the 'else' part should
                     // remain.
+#pragma warning disable 612, 618
                     if (tokenType == StandardTokenizer.ACRONYM_DEP)
                     {
                         typeAtt.Type = StandardTokenizer.TOKEN_TYPES[StandardTokenizer.HOST];
+#pragma warning restore 612, 618
                         termAtt.Length = termAtt.Length - 1; // remove extra '.'
                     }
                     else

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
index 31a310a..5e0d6e7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs
@@ -944,6 +944,7 @@ namespace Lucene.Net.Analysis.Standard
         /// the number of characters up to the start of the matched text </summary>
         private int yyChar;
 
+#pragma warning disable 169, 414
         /// <summary>
         /// the number of characters from the last newline up to the start of the 
         /// matched text
@@ -963,6 +964,8 @@ namespace Lucene.Net.Analysis.Standard
         /// denotes if the user-EOF-code has already been executed </summary>
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /// <summary>
         /// Alphanumeric sequences </summary>
@@ -1346,7 +1349,7 @@ namespace Lucene.Net.Analysis.Standard
                         { // Break so we don't hit fall-through warning:
                             break; // Not numeric, word, ideographic, hiragana, or SE Asian -- ignore it.
                         }
-                        goto case 9;
+                        // goto case 9; // unreachable
                     case 9:
                         break;
                     case 2:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
index 561718b..40f32d2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs
@@ -678,6 +678,7 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /** the number of characters up to the start of the matched text */
         private int yyChar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -695,6 +696,8 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = StandardTokenizer.ALPHANUM;
@@ -944,7 +947,7 @@ namespace Lucene.Net.Analysis.Standard.Std31
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
index 3c6f01b..84764aa 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs
@@ -3233,6 +3233,7 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -3250,6 +3251,8 @@ namespace Lucene.Net.Analysis.Standard.Std31
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = UAX29URLEmailTokenizer.ALPHANUM;
@@ -3481,7 +3484,7 @@ namespace Lucene.Net.Analysis.Standard.Std31
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
index fd63c00..e3d0d26 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs
@@ -694,6 +694,7 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /** the number of characters up to the start of the matched text */
         private int yyChar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -711,6 +712,8 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = StandardTokenizer.ALPHANUM;
@@ -960,7 +963,7 @@ namespace Lucene.Net.Analysis.Standard.Std34
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
index 6ad22b9..ec21b8c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs
@@ -3336,6 +3336,7 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -3353,6 +3354,8 @@ namespace Lucene.Net.Analysis.Standard.Std34
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = UAX29URLEmailTokenizer.ALPHANUM;
@@ -3608,7 +3611,7 @@ namespace Lucene.Net.Analysis.Standard.Std34
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
index e7a05ce..0aca788 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs
@@ -3771,6 +3771,7 @@ namespace Lucene.Net.Analysis.Standard.Std36
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -3788,6 +3789,8 @@ namespace Lucene.Net.Analysis.Standard.Std36
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = UAX29URLEmailTokenizer.ALPHANUM;
@@ -4043,7 +4046,7 @@ namespace Lucene.Net.Analysis.Standard.Std36
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
index c55e94f..06ebff1 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs
@@ -801,6 +801,7 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /** the number of characters up to the start of the matched text */
         private int yyChar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -818,6 +819,8 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = StandardTokenizer.ALPHANUM;
@@ -1067,7 +1070,7 @@ namespace Lucene.Net.Analysis.Standard.Std40
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
index 279643f..1604bdf 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs
@@ -3983,6 +3983,7 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -4000,6 +4001,8 @@ namespace Lucene.Net.Analysis.Standard.Std40
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = UAX29URLEmailTokenizer.ALPHANUM;
@@ -4255,7 +4258,7 @@ namespace Lucene.Net.Analysis.Standard.Std40
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizer.cs
index 1dd7d73..51aa76b 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizer.cs
@@ -134,6 +134,7 @@ namespace Lucene.Net.Analysis.Standard
         private IStandardTokenizerInterface GetScannerFor(LuceneVersion matchVersion)
         {
             // best effort NPE if you dont call reset
+#pragma warning disable 612, 618
             if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_47))
             {
                 return new UAX29URLEmailTokenizerImpl(input);
@@ -154,6 +155,7 @@ namespace Lucene.Net.Analysis.Standard
             {
                 return new UAX29URLEmailTokenizerImpl31(input);
             }
+#pragma warning restore 612, 618
         }
 
         // this tokenizer generates three attributes:

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
index 09a343d..dccb29d 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs
@@ -9088,6 +9088,7 @@ namespace Lucene.Net.Analysis.Standard
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -9105,6 +9106,8 @@ namespace Lucene.Net.Analysis.Standard
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
         /** Alphanumeric sequences */
         public static readonly int WORD_TYPE = UAX29URLEmailTokenizer.ALPHANUM;
@@ -9360,7 +9363,7 @@ namespace Lucene.Net.Analysis.Standard
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishAnalyzer.cs
index e742a1b..42073b2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishAnalyzer.cs
@@ -64,7 +64,9 @@ namespace Lucene.Net.Analysis.Sv
                 {
                     DEFAULT_STOP_SET = WordlistLoader.GetSnowballWordSet(
                         IOUtils.GetDecodingReader(typeof(SnowballFilter), typeof(SnowballFilter).Namespace + "." + DEFAULT_STOPWORD_FILE, Encoding.UTF8),
+#pragma warning disable 612, 618
                         LuceneVersion.LUCENE_CURRENT);
+#pragma warning restore 612, 618
                 }
                 catch (IOException)
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
index 2a01557..8514f91 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
@@ -113,8 +113,10 @@ namespace Lucene.Net.Analysis.Synonym
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = factory == null ? new WhitespaceTokenizer(LuceneVersion.LUCENE_CURRENT, reader) : factory.Create(reader);
                 TokenStream stream = outerInstance.ignoreCase ? (TokenStream)new LowerCaseFilter(LuceneVersion.LUCENE_CURRENT, tokenizer) : tokenizer;
+#pragma warning restore 612, 618
                 return new Analyzer.TokenStreamComponents(tokenizer, stream);
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilterFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilterFactory.cs
index 9e0f6a1..eb21c0a 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilterFactory.cs
@@ -63,10 +63,12 @@ namespace Lucene.Net.Analysis.Synonym
             : base(args)
         {
             AssureMatchVersion();
+#pragma warning disable 612, 618
             if (luceneMatchVersion.OnOrAfter(Lucene.Net.Util.LuceneVersion.LUCENE_34))
             {
                 delegator = new FSTSynonymFilterFactory(new Dictionary<string, string>(OriginalArgs));
             }
+#pragma warning restore 612, 618
             else
             {
                 // check if you use the new optional arg "format". this makes no sense for the old one, 
@@ -75,7 +77,9 @@ namespace Lucene.Net.Analysis.Synonym
                 {
                     throw new System.ArgumentException("You must specify luceneMatchVersion >= 3.4 to use alternate synonyms formats");
                 }
+#pragma warning disable 612, 618
                 delegator = new SlowSynonymFilterFactory(new Dictionary<string, string>(OriginalArgs));
+#pragma warning restore 612, 618
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
index 4bfb007..6b42eb4 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
@@ -254,7 +254,9 @@ namespace Lucene.Net.Analysis.Synonym
 
                 IEnumerable<CharsRef> keys = workingSet.Keys;
                 CharsRef[] sortedKeys = keys.ToArray();
+#pragma warning disable 612, 618
                 System.Array.Sort(sortedKeys, CharsRef.UTF16SortedAsUTF8Comparer);
+#pragma warning restore 612, 618
 
 
                 IntsRef scratchIntsRef = new IntsRef();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
index 0e09fd9..36f3d5e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiAnalyzer.cs
@@ -87,7 +87,11 @@ namespace Lucene.Net.Analysis.Th
         /// </summary>
         /// <param name="matchVersion"> lucene compatibility version </param>
         public ThaiAnalyzer(LuceneVersion matchVersion)
-              : this(matchVersion, matchVersion.OnOrAfter(LuceneVersion.LUCENE_36) ? DefaultSetHolder.DEFAULT_STOP_SET : StopAnalyzer.ENGLISH_STOP_WORDS_SET)
+              : this(matchVersion,
+#pragma warning disable 612, 618
+                    matchVersion.OnOrAfter(LuceneVersion.LUCENE_36) ?
+#pragma warning restore 612, 618
+                    DefaultSetHolder.DEFAULT_STOP_SET : StopAnalyzer.ENGLISH_STOP_WORDS_SET)
         {
         }
 
@@ -123,11 +127,15 @@ namespace Lucene.Net.Analysis.Th
             {
                 Tokenizer source = new StandardTokenizer(matchVersion, reader);
                 TokenStream result = new StandardFilter(matchVersion, source);
+#pragma warning disable 612, 618
                 if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
+#pragma warning restore 612, 618
                 {
                     result = new LowerCaseFilter(matchVersion, result);
                 }
+#pragma warning disable 612, 618
                 result = new ThaiWordFilter(matchVersion, result);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(source, new StopFilter(matchVersion, result, stopwords));
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
index 8f11f4c..7992094 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AbstractAnalysisFactory.cs
@@ -61,7 +61,11 @@ namespace Lucene.Net.Analysis.Util
             string version = Get(args, LUCENE_MATCH_VERSION_PARAM);
             // LUCENENET TODO: What should we do if the version is null?
             //luceneMatchVersion = version == null ? (LuceneVersion?)null : LuceneVersionHelpers.ParseLeniently(version);
-            luceneMatchVersion = version == null ? LuceneVersion.LUCENE_CURRENT : LuceneVersionHelpers.ParseLeniently(version);
+            luceneMatchVersion = version == null ?
+#pragma warning disable 612, 618
+                LuceneVersion.LUCENE_CURRENT :
+#pragma warning restore 612, 618
+                LuceneVersionHelpers.ParseLeniently(version);
             args.Remove(CLASS_NAME); // consume the class arg
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
index c5f09f1..37d204f 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
@@ -1076,8 +1076,10 @@ namespace Lucene.Net.Analysis.Util
         /// </summary>
         private class EmptyCharArrayMap<V> : UnmodifiableCharArrayMap<V>
         {
-            public EmptyCharArrayMap() 
+            public EmptyCharArrayMap()
+#pragma warning disable 612, 618
                 : base(new CharArrayMap<V>(LuceneVersion.LUCENE_CURRENT, 0, false))
+#pragma warning restore 612, 618
             {
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
index 4193ec1..40f5b79 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
@@ -46,7 +46,9 @@ namespace Lucene.Net.Analysis.Util
         ///         <seealso cref="LuceneVersion"/> instance. </returns>
         public static CharacterUtils GetInstance(LuceneVersion matchVersion)
         {
+#pragma warning disable 612, 618
             return matchVersion.OnOrAfter(LuceneVersion.LUCENE_31) ? JAVA_5 : JAVA_4;
+#pragma warning restore 612, 618
         }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Util/FilteringTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/FilteringTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/FilteringTokenFilter.cs
index 6fa4ea6..386dadf 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/FilteringTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/FilteringTokenFilter.cs
@@ -36,7 +36,10 @@ namespace Lucene.Net.Analysis.Util
 
         private static void CheckPositionIncrement(LuceneVersion version, bool enablePositionIncrements)
         {
-            if (!enablePositionIncrements && version.OnOrAfter(LuceneVersion.LUCENE_44))
+            if (!enablePositionIncrements &&
+#pragma warning disable 612, 618
+                version.OnOrAfter(LuceneVersion.LUCENE_44))
+#pragma warning restore 612, 618
             {
                 throw new System.ArgumentException("enablePositionIncrements=false is not supported anymore as of Lucene 4.4 as it can create broken token streams");
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Util/StopwordAnalyzerBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StopwordAnalyzerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StopwordAnalyzerBase.cs
index 3be0a67..9660346 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StopwordAnalyzerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StopwordAnalyzerBase.cs
@@ -98,9 +98,11 @@ namespace Lucene.Net.Analysis.Util
             TextReader reader = null;
             try
             {
-                //reader = IOUtils.GetDecodingReader(aClass.GetResourceAsStream(resource), StandardCharsets.UTF_8);
                 reader = IOUtils.GetDecodingReader(aClass.Assembly.GetManifestResourceStream(resource), Encoding.UTF8);
-                return WordlistLoader.GetWordSet(reader, comment, new CharArraySet(LuceneVersion.LUCENE_CURRENT, 16, ignoreCase));
+                return WordlistLoader.GetWordSet(reader, comment, new CharArraySet(
+#pragma warning disable 612, 618
+                    LuceneVersion.LUCENE_CURRENT, 16, ignoreCase));
+#pragma warning restore 612, 618
             }
             finally
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
index db2a5bb..5b7f987 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs
@@ -426,6 +426,7 @@ namespace Lucene.Net.Analysis.Wikipedia
         /** the number of characters up to the start of the matched text */
         private int yychar;
 
+#pragma warning disable 169, 414
         /**
          * the number of characters from the last newline up to the start of the 
          * matched text
@@ -443,6 +444,8 @@ namespace Lucene.Net.Analysis.Wikipedia
         /** denotes if the user-EOF-code has already been executed */
         private bool zzEOFDone;
 
+#pragma warning restore 169, 414
+
         /* user code: */
 
         public static readonly int ALPHANUM = WikipediaTokenizer.ALPHANUM_ID;
@@ -734,7 +737,7 @@ namespace Lucene.Net.Analysis.Wikipedia
             {
                 message = ZZ_ERROR_MSG[errorCode];
             }
-            catch (IndexOutOfRangeException e)
+            catch (IndexOutOfRangeException /*e*/)
             {
                 message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
index db4ad4a..2afe8fe 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/KpStemmer.cs
@@ -1845,7 +1845,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 // setmark p2, line 206
                 I_p2 = cursor;
             } while (false);
-            lab0:
+            //lab0: // not referenced
             cursor = v_1;
             // do, line 208
             v_2 = cursor;
@@ -2136,7 +2136,7 @@ namespace Lucene.Net.Tartarus.Snowball.Ext
                 }
                 replab2:;
             } while (false);
-            lab1:
+            //lab1: // not referenced
             cursor = v_2;
             // call measure, line 221
             if (!r_measure())

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
index 265fd1c..8b7381c 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Ext/PorterStemmer.cs
@@ -747,7 +747,7 @@
                 }
                 replab2:;
             } while (false);
-            lab1:
+            //lab1: // not referenced
             cursor = v_2;
             I_p1 = limit;
             I_p2 = limit;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/SnowballProgram.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/SnowballProgram.cs b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/SnowballProgram.cs
index 8f183f7..8c75fd9 100644
--- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/SnowballProgram.cs
+++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/SnowballProgram.cs
@@ -333,15 +333,15 @@ namespace Lucene.Net.Tartarus.Snowball
                     bool res;
                     try
                     {
-                        Object resobj = w.method.Invoke(w.methodobject, EMPTY_ARGS);
+                        object resobj = w.method.Invoke(w.methodobject, EMPTY_ARGS);
                         res = resobj.ToString().Equals("true");
                     }
-                    catch (TargetInvocationException e)
+                    catch (TargetInvocationException /*e*/)
                     {
                         res = false;
                         // FIXME - debug message
                     }
-                    catch (Exception e)
+                    catch (Exception /*e*/)
                     {
                         res = false;
                         // FIXME - debug message
@@ -418,12 +418,12 @@ namespace Lucene.Net.Tartarus.Snowball
                         object resobj = w.method.Invoke(w.methodobject, EMPTY_ARGS);
                         res = resobj.ToString().Equals("true");
                     }
-                    catch (TargetInvocationException e)
+                    catch (TargetInvocationException /*e*/)
                     {
                         res = false;
                         // FIXME - debug message
                     }
-                    catch (Exception e)
+                    catch (Exception /*e*/)
                     {
                         res = false;
                         // FIXME - debug message

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
index 6f7c0f7..d8bfabe 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicNormalizationFilter.cs
@@ -113,7 +113,9 @@ namespace Lucene.Net.Analysis.Ar
 
         private void Check(string input, string expected)
         {
+#pragma warning disable 612, 618
             ArabicLetterTokenizer tokenStream = new ArabicLetterTokenizer(TEST_VERSION_CURRENT, new StringReader(input));
+#pragma warning restore 612, 618
             ArabicNormalizationFilter filter = new ArabicNormalizationFilter(tokenStream);
             AssertTokenStreamContents(filter, new string[] { expected });
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
index 171988f..d54cf7c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ar/TestArabicStemFilter.cs
@@ -161,7 +161,9 @@ namespace Lucene.Net.Analysis.Ar
         {
             CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 1, true);
             set.add("\u0633\u0627\u0647\u062f\u0647\u0627\u062a");
+#pragma warning disable 612, 618
             ArabicLetterTokenizer tokenStream = new ArabicLetterTokenizer(TEST_VERSION_CURRENT, new StringReader("\u0633\u0627\u0647\u062f\u0647\u0627\u062a"));
+#pragma warning restore 612, 618
 
             ArabicStemFilter filter = new ArabicStemFilter(new SetKeywordMarkerFilter(tokenStream, set));
             AssertTokenStreamContents(filter, new string[] { "\u0633\u0627\u0647\u062f\u0647\u0627\u062a" });
@@ -169,7 +171,9 @@ namespace Lucene.Net.Analysis.Ar
 
         private void Check(string input, string expected)
         {
+#pragma warning disable 612, 618
             ArabicLetterTokenizer tokenStream = new ArabicLetterTokenizer(TEST_VERSION_CURRENT, new StringReader(input));
+#pragma warning restore 612, 618
             ArabicStemFilter filter = new ArabicStemFilter(tokenStream);
             AssertTokenStreamContents(filter, new string[] { expected });
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs
index 5bf0429..4fa48b9 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs
@@ -50,7 +50,9 @@ namespace Lucene.Net.Analysis.Core
                 // LUCENENET: Added this specialized BufferedCharFilter which doesn't need a factory
                 typeof(BufferedCharFilter)
             });
+#pragma warning disable 612, 618
             deprecatedDuplicatedComponents.addAll(new Type[] { typeof(DutchStemFilter), typeof(FrenchStemFilter), typeof(IndicTokenizer) });
+#pragma warning restore 612, 618
             oddlyNamedComponents.addAll(new Type[] { typeof(ReversePathHierarchyTokenizer), typeof(SnowballFilter), typeof(PatternKeywordMarkerFilter), typeof(SetKeywordMarkerFilter) }); // this is called SnowballPorterFilterFactory -  this is supported via an option to PathHierarchyTokenizer's factory
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
index a75744d..7004614 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs
@@ -98,6 +98,7 @@ namespace Lucene.Net.Analysis.Core
         // StandardAnalyzer constants remain publicly accessible
         public virtual void _TestStandardConstants()
         {
+#pragma warning disable 219, 612, 618
             int x = StandardTokenizer.ALPHANUM;
             x = StandardTokenizer.APOSTROPHE;
             x = StandardTokenizer.ACRONYM;
@@ -107,6 +108,7 @@ namespace Lucene.Net.Analysis.Core
             x = StandardTokenizer.NUM;
             x = StandardTokenizer.CJ;
             string[] y = StandardTokenizer.TOKEN_TYPES;
+#pragma warning restore 219, 612, 618
         }
 
         private class LowerCaseWhitespaceAnalyzer : Analyzer

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs
index 14b6998..0dc6757 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs
@@ -154,7 +154,9 @@ namespace Lucene.Net.Analysis.Core
 
             // 2.4 should not show the bug. But, alas, it's also obsolete,
             // so we check latest released (Robert's gonna break this on 4.0 soon :) )
+#pragma warning disable 612, 618
             a2 = new ClassicAnalyzer(LuceneVersion.LUCENE_31);
+#pragma warning restore 612, 618
             AssertAnalyzesTo(a2, "www.nutch.org.", new string[] { "www.nutch.org" }, new string[] { "<HOST>" });
         }
 
@@ -267,7 +269,9 @@ namespace Lucene.Net.Analysis.Core
         [Test]
         public virtual void TestJava14BWCompatibility()
         {
+#pragma warning disable 612, 618
             ClassicAnalyzer sa = new ClassicAnalyzer(LuceneVersion.LUCENE_30);
+#pragma warning restore 612, 618
             AssertAnalyzesTo(sa, "test\u02C6test", new string[] { "test", "test" });
         }
 
@@ -298,8 +302,9 @@ namespace Lucene.Net.Analysis.Core
                     doc.Add(new TextField("content", "abc bbb ccc", Field.Store.NO));
                     writer.AddDocument(doc);
                 }
-
+#pragma warning disable 612, 618
                 using (IndexReader reader = IndexReader.Open(dir))
+#pragma warning restore 612, 618
                 {
 
                     // Make sure all terms < max size were indexed
@@ -331,7 +336,9 @@ namespace Lucene.Net.Analysis.Core
                 {
                     writer.AddDocument(doc);
                 }
+#pragma warning disable 612, 618
                 using (var reader = IndexReader.Open(dir))
+#pragma warning restore 612, 618
                 {
                     assertEquals(1, reader.DocFreq(new Term("content", bigTerm)));
                 }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index 8da141f..fb2676a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -281,7 +281,9 @@ namespace Lucene.Net.Analysis.Core
             { typeof(Regex[]), new RegexArrayArgProducer() },
             { typeof(IPayloadEncoder), new PayloadEncoderArgProducer() },
             { typeof(Dictionary), new DictionaryArgProducer() },
+            #pragma warning disable 612, 618 
             { typeof(Lucene43EdgeNGramTokenizer.Side), new Lucene43SideArgProducer() },
+            #pragma warning restore 612, 618 
             { typeof(EdgeNGramTokenFilter.Side), new SideArgProducer() },
             { typeof(HyphenationTree), new HyphenationTreeArgProducer() },
             { typeof(SnowballProgram), new SnowballProgramArgProducer() },
@@ -470,7 +472,9 @@ namespace Lucene.Net.Analysis.Core
                         catch (Exception ex)
                         {
                             throw ex;
+#pragma warning disable 162
                             return null; // unreachable code
+#pragma warning restore 162
                         }
                     }
                 }
@@ -482,8 +486,10 @@ namespace Lucene.Net.Analysis.Core
             public object Create(Random random)
             {
                 return random.nextBoolean()
+#pragma warning disable 612, 618
                     ? Lucene43EdgeNGramTokenizer.Side.FRONT
                     : Lucene43EdgeNGramTokenizer.Side.BACK;
+#pragma warning restore 612, 618
             }
         }
 
@@ -493,7 +499,9 @@ namespace Lucene.Net.Analysis.Core
             {
                 return random.nextBoolean()
                     ? EdgeNGramTokenFilter.Side.FRONT
+#pragma warning disable 612, 618
                     : EdgeNGramTokenFilter.Side.BACK;
+#pragma warning restore 612, 618
             }
         }
 
@@ -513,7 +521,9 @@ namespace Lucene.Net.Analysis.Core
                 catch (Exception ex)
                 {
                     throw ex;
+#pragma warning disable 162
                     return null; // unreachable code
+#pragma warning restore 162
                 }
             }
         }
@@ -531,7 +541,9 @@ namespace Lucene.Net.Analysis.Core
                 catch (Exception ex)
                 {
                     throw ex;
+#pragma warning disable 162
                     return null; // unreachable code
+#pragma warning restore 162
                 }
             }
         }
@@ -635,7 +647,9 @@ namespace Lucene.Net.Analysis.Core
                 catch (Exception ex)
                 {
                     throw ex;
+#pragma warning disable 162
                     return null; // unreachable code
+#pragma warning restore 162
                 }
             }
         }
@@ -657,7 +671,9 @@ namespace Lucene.Net.Analysis.Core
                 catch (Exception ex)
                 {
                     throw ex;
+#pragma warning disable 162
                     return null; // unreachable code
+#pragma warning restore 162
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
index d99796c..55642b8 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStandardAnalyzer.cs
@@ -338,7 +338,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_36, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }
@@ -365,7 +367,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_40, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
index ea554f6..95108c6 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestStopFilter.cs
@@ -76,11 +76,13 @@ namespace Lucene.Net.Analysis.Core
             CharArraySet stopSet = StopFilter.MakeStopSet(TEST_VERSION_CURRENT, stopWords);
             // with increments
             StringReader reader = new StringReader(sb.ToString());
+#pragma warning disable 612, 618
             StopFilter stpf = new StopFilter(Version.LUCENE_40, new MockTokenizer(reader, MockTokenizer.WHITESPACE, false), stopSet);
             DoTestStopPositons(stpf, true);
             // without increments
             reader = new StringReader(sb.ToString());
             stpf = new StopFilter(Version.LUCENE_43, new MockTokenizer(reader, MockTokenizer.WHITESPACE, false), stopSet);
+#pragma warning restore 612, 618
             DoTestStopPositons(stpf, false);
             // with increments, concatenating two stop filters
             List<string> a0 = new List<string>();
@@ -222,7 +224,9 @@ namespace Lucene.Net.Analysis.Core
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filter = new MockSynonymFilter(outerInstance, tokenizer);
+#pragma warning disable 612, 618
                 StopFilter stopfilter = new StopFilter(Version.LUCENE_43, filter, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
+#pragma warning restore 612, 618
                 stopfilter.EnablePositionIncrements = false;
                 return new TokenStreamComponents(tokenizer, stopfilter);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestTypeTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestTypeTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestTypeTokenFilter.cs
index fd1f423..0da3198 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestTypeTokenFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestTypeTokenFilter.cs
@@ -34,7 +34,11 @@ namespace Lucene.Net.Analysis.Core
         {
             StringReader reader = new StringReader("121 is palindrome, while 123 is not");
             ISet<string> stopTypes = AsSet("<NUM>");
-            TokenStream stream = new TypeTokenFilter(TEST_VERSION_CURRENT, true, new StandardTokenizer(TEST_VERSION_CURRENT, reader), stopTypes);
+            TokenStream stream =
+#pragma warning disable 612, 618
+                new TypeTokenFilter(
+#pragma warning restore 612, 618
+                    TEST_VERSION_CURRENT, true, new StandardTokenizer(TEST_VERSION_CURRENT, reader), stopTypes);
             AssertTokenStreamContents(stream, new string[] { "is", "palindrome", "while", "is", "not" });
         }
 
@@ -68,7 +72,11 @@ namespace Lucene.Net.Analysis.Core
 
             // without increments
             reader = new StringReader(sb.ToString());
-            typeTokenFilter = new TypeTokenFilter(LuceneVersion.LUCENE_43, false, new StandardTokenizer(TEST_VERSION_CURRENT, reader), stopSet);
+            typeTokenFilter =
+#pragma warning disable 612, 618
+                new TypeTokenFilter(LuceneVersion.LUCENE_43, 
+#pragma warning restore 612, 618
+                    false, new StandardTokenizer(TEST_VERSION_CURRENT, reader), stopSet);
             TestPositons(typeTokenFilter);
 
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
index 005446d..c3a8daa 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestUAX29URLEmailTokenizer.cs
@@ -589,8 +589,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
-
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_31, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }
@@ -616,7 +617,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_34, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }
@@ -641,7 +644,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_36, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }
@@ -668,7 +673,9 @@ namespace Lucene.Net.Analysis.Core
 
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new UAX29URLEmailTokenizer(LuceneVersion.LUCENE_40, reader);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer);
             }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanAnalyzer.cs
index c665d54..c065b9a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/De/TestGermanAnalyzer.cs
@@ -63,7 +63,9 @@ namespace Lucene.Net.Analysis.De
             CheckOneTerm(a, "Schaltflächen", "schaltflach");
             CheckOneTerm(a, "Schaltflaechen", "schaltflach");
             // here they are with the old stemmer
+#pragma warning disable 612, 618
             a = new GermanAnalyzer(LuceneVersion.LUCENE_30);
+#pragma warning restore 612, 618
             CheckOneTerm(a, "Schaltflächen", "schaltflach");
             CheckOneTerm(a, "Schaltflaechen", "schaltflaech");
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
index cd62048..66faa65 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fa/TestPersianNormalizationFilter.cs
@@ -67,7 +67,9 @@ namespace Lucene.Net.Analysis.Fa
 
         private void Check(string input, string expected)
         {
+#pragma warning disable 612, 618
             ArabicLetterTokenizer tokenStream = new ArabicLetterTokenizer(TEST_VERSION_CURRENT, new StringReader(input));
+#pragma warning restore 612, 618
             PersianNormalizationFilter filter = new PersianNormalizationFilter(tokenStream);
             AssertTokenStreamContents(filter, new string[] { expected });
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchAnalyzer.cs
index f8ef708..4df4bb4 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Fr/TestFrenchAnalyzer.cs
@@ -166,7 +166,9 @@ namespace Lucene.Net.Analysis.Fr
         [Test]
         public virtual void TestStopwordsCasing()
         {
+#pragma warning disable 612, 618
             FrenchAnalyzer a = new FrenchAnalyzer(LuceneVersion.LUCENE_31);
+#pragma warning restore 612, 618
             AssertAnalyzesTo(a, "Votre", new string[] { });
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/It/TestItalianAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/It/TestItalianAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/It/TestItalianAnalyzer.cs
index 5be2b1e..da097a2 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/It/TestItalianAnalyzer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/It/TestItalianAnalyzer.cs
@@ -80,7 +80,9 @@ namespace Lucene.Net.Analysis.It
         [Test]
         public virtual void TestContractionsBackwards()
         {
+#pragma warning disable 612, 618
             Analyzer a = new ItalianAnalyzer(LuceneVersion.LUCENE_31);
+#pragma warning restore 612, 618
             AssertAnalyzesTo(a, "dell'Italia", new string[] { "dell'ital" });
             AssertAnalyzesTo(a, "l'Italiano", new string[] { "l'ital" });
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs
index 85a9632..6a42d93 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs
@@ -27,6 +27,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
     /// <summary>
     /// Verifies the behavior of PatternAnalyzer.
     /// </summary>
+#pragma warning disable 612, 618
     public class PatternAnalyzerTest : BaseTokenStreamTestCase
     {
 
@@ -140,4 +141,5 @@ namespace Lucene.Net.Analysis.Miscellaneous
             CheckRandomData(Random(), a, 10000 * RANDOM_MULTIPLIER);
         }
     }
+#pragma warning restore 612, 618
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestKeepWordFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestKeepWordFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestKeepWordFilter.cs
index 1aae3d0..331c8d4 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestKeepWordFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestKeepWordFilter.cs
@@ -49,12 +49,14 @@ namespace Lucene.Net.Analysis.Miscellaneous
 
             // Test Stopwords
             stream = new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
             stream = new KeepWordFilter(LuceneVersion.LUCENE_43, false, stream, new CharArraySet(TEST_VERSION_CURRENT, words, true));
             AssertTokenStreamContents(stream, new string[] { "aaa", "BBB" }, new int[] { 1, 1 });
 
             // Now force case
             stream = new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false);
             stream = new KeepWordFilter(LuceneVersion.LUCENE_43, false, stream, new CharArraySet(TEST_VERSION_CURRENT, words, false));
+#pragma warning restore 612, 618
             AssertTokenStreamContents(stream, new string[] { "aaa" }, new int[] { 1 });
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilter.cs
index b269fed..43eb450 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilter.cs
@@ -30,7 +30,9 @@ namespace Lucene.Net.Analysis.Miscellaneous
         public virtual void TestFilterNoPosIncr()
         {
             TokenStream stream = new MockTokenizer(new StringReader("short toolong evenmuchlongertext a ab toolong foo"), MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
             LengthFilter filter = new LengthFilter(LuceneVersion.LUCENE_43, false, stream, 2, 6);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(filter, new string[] { "short", "ab", "foo" }, new int[] { 1, 1, 1 });
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilterFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilterFactory.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilterFactory.cs
index 9dda1d2..2d3d08f 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilterFactory.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLengthFilterFactory.cs
@@ -31,7 +31,10 @@ namespace Lucene.Net.Analysis.Miscellaneous
         {
             Reader reader = new StringReader("foo foobar super-duper-trooper");
             TokenStream stream = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
-            stream = TokenFilterFactory("Length", LuceneVersion.LUCENE_43, new ClasspathResourceLoader(this.GetType()), "min", "4", "max", "10", "enablePositionIncrements", "false").Create(stream);
+#pragma warning disable 612, 618
+            stream = TokenFilterFactory("Length", LuceneVersion.LUCENE_43,
+#pragma warning restore 612, 618
+                new ClasspathResourceLoader(this.GetType()), "min", "4", "max", "10", "enablePositionIncrements", "false").Create(stream);
             AssertTokenStreamContents(stream, new string[] { "foobar" }, new int[] { 1 });
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLucene47WordDelimiterFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLucene47WordDelimiterFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLucene47WordDelimiterFilter.cs
index c93ff09..8bb8cd1 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLucene47WordDelimiterFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestLucene47WordDelimiterFilter.cs
@@ -179,7 +179,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
             internal ICharTermAttribute termAtt;
             internal IPositionIncrementAttribute posIncAtt;
 
-            protected internal LargePosIncTokenFilter(TestLucene47WordDelimiterFilter outerInstance, TokenStream input) : base(input)
+            public LargePosIncTokenFilter(TestLucene47WordDelimiterFilter outerInstance, TokenStream input) : base(input)
             {
                 this.outerInstance = outerInstance;
                 this.termAtt = AddAttribute<ICharTermAttribute>();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestTrimFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestTrimFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestTrimFilter.cs
index 7805df9..5a7f614 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestTrimFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestTrimFilter.cs
@@ -25,7 +25,7 @@ namespace Lucene.Net.Analysis.Miscellaneous
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
-
+#pragma warning disable 612, 618
     public class TestTrimFilter : BaseTokenStreamTestCase
     {
 
@@ -173,4 +173,5 @@ namespace Lucene.Net.Analysis.Miscellaneous
             }
         }
     }
+#pragma warning restore 612, 618
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenFilterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenFilterTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenFilterTest.cs
index 5186cd3..c8bd552 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenFilterTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenFilterTest.cs
@@ -46,7 +46,9 @@ namespace Lucene.Net.Analysis.Ngram
             bool gotException = false;
             try
             {
+#pragma warning disable 612, 618
                 new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 0, 0);
+#pragma warning restore 612, 618
             }
             catch (System.ArgumentException)
             {
@@ -61,7 +63,9 @@ namespace Lucene.Net.Analysis.Ngram
             bool gotException = false;
             try
             {
+#pragma warning disable 612, 618
                 new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 2, 1);
+#pragma warning restore 612, 618
             }
             catch (System.ArgumentException)
             {
@@ -76,7 +80,9 @@ namespace Lucene.Net.Analysis.Ngram
             bool gotException = false;
             try
             {
+#pragma warning disable 612, 618
                 new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, -1, 2);
+#pragma warning restore 612, 618
             }
             catch (System.ArgumentException)
             {
@@ -88,35 +94,45 @@ namespace Lucene.Net.Analysis.Ngram
         [Test]
         public virtual void TestFrontUnigram()
         {
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 1, 1);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "a" }, new int[] { 0 }, new int[] { 5 });
         }
 
         [Test]
         public virtual void TestBackUnigram()
         {
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(LuceneVersion.LUCENE_43, input, EdgeNGramTokenFilter.Side.BACK, 1, 1);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "e" }, new int[] { 4 }, new int[] { 5 });
         }
 
         [Test]
         public virtual void TestOversizedNgrams()
         {
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 6, 6);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[0], new int[0], new int[0]);
         }
 
         [Test]
         public virtual void TestFrontRangeOfNgrams()
         {
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "a", "ab", "abc" }, new int[] { 0, 0, 0 }, new int[] { 5, 5, 5 });
         }
 
         [Test]
         public virtual void TestBackRangeOfNgrams()
         {
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(LuceneVersion.LUCENE_43, input, EdgeNGramTokenFilter.Side.BACK, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "e", "de", "cde" }, new int[] { 4, 3, 2 }, new int[] { 5, 5, 5 }, null, null, null, null, false);
         }
 
@@ -124,7 +140,9 @@ namespace Lucene.Net.Analysis.Ngram
         public virtual void TestFilterPositions()
         {
             TokenStream ts = new MockTokenizer(new StringReader("abcde vwxyz"), MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, ts, EdgeNGramTokenFilter.Side.FRONT, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "a", "ab", "abc", "v", "vw", "vwx" }, new int[] { 0, 0, 0, 6, 6, 6 }, new int[] { 5, 5, 5, 11, 11, 11 }, null, new int[] { 1, 0, 0, 1, 0, 0 }, null, null, false);
         }
 
@@ -171,7 +189,9 @@ namespace Lucene.Net.Analysis.Ngram
         {
             TokenStream ts = new MockTokenizer(new StringReader("a abc"), MockTokenizer.WHITESPACE, false);
             ts = new PositionFilter(ts); // All but first token will get 0 position increment
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter filter = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, ts, EdgeNGramTokenFilter.Side.FRONT, 2, 3);
+#pragma warning restore 612, 618
             // The first token "a" will not be output, since it's smaller than the mingram size of 2.
             // The second token on input to EdgeNGramTokenFilter will have position increment of 0,
             // which should be increased to 1, since this is the first output token in the stream.
@@ -182,7 +202,9 @@ namespace Lucene.Net.Analysis.Ngram
         public virtual void TestSmallTokenInStream()
         {
             input = new MockTokenizer(new StringReader("abc de fgh"), MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, input, EdgeNGramTokenFilter.Side.FRONT, 3, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "abc", "fgh" }, new int[] { 0, 7 }, new int[] { 3, 10 });
         }
 
@@ -190,7 +212,9 @@ namespace Lucene.Net.Analysis.Ngram
         public virtual void TestReset()
         {
             WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader("abcde"));
+#pragma warning disable 612, 618
             EdgeNGramTokenFilter filter = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, tokenizer, EdgeNGramTokenFilter.Side.FRONT, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(filter, new string[] { "a", "ab", "abc" }, new int[] { 0, 0, 0 }, new int[] { 5, 5, 5 });
             tokenizer.Reader = new StringReader("abcde");
             AssertTokenStreamContents(filter, new string[] { "a", "ab", "abc" }, new int[] { 0, 0, 0 }, new int[] { 5, 5, 5 });
@@ -220,7 +244,9 @@ namespace Lucene.Net.Analysis.Ngram
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenFilter filters = new ASCIIFoldingFilter(tokenizer);
+#pragma warning disable 612, 618
                 filters = new EdgeNGramTokenFilter(LuceneVersion.LUCENE_43, filters, EdgeNGramTokenFilter.Side.FRONT, 2, 15);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer, filters);
             }
         }
@@ -276,7 +302,9 @@ namespace Lucene.Net.Analysis.Ngram
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
                 return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(LuceneVersion.LUCENE_43, tokenizer, EdgeNGramTokenFilter.Side.BACK, 2, 4));
+#pragma warning restore 612, 618 
             }
         }
 
@@ -303,7 +331,9 @@ namespace Lucene.Net.Analysis.Ngram
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
+#pragma warning disable 612, 618
                 return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, tokenizer, EdgeNGramTokenFilter.Side.FRONT, 2, 15));
+#pragma warning restore 612, 618
             }
         }
 
@@ -319,7 +349,9 @@ namespace Lucene.Net.Analysis.Ngram
             public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
+#pragma warning disable 612, 618
                 return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(LuceneVersion.LUCENE_43, tokenizer, EdgeNGramTokenFilter.Side.BACK, 2, 15));
+#pragma warning restore 612, 618
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenizerTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenizerTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenizerTest.cs
index 992a9dc..71183c2 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenizerTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/EdgeNGramTokenizerTest.cs
@@ -92,7 +92,9 @@ namespace Lucene.Net.Analysis.Ngram
         [Test]
         public virtual void TestBackUnigram()
         {
+#pragma warning disable 612, 618
             Tokenizer tokenizer = new Lucene43EdgeNGramTokenizer(Version.LUCENE_43, input, Lucene43EdgeNGramTokenizer.Side.BACK, 1, 1);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "e" }, new int[] { 4 }, new int[] { 5 }, 5); // abcde
         }
 
@@ -113,7 +115,9 @@ namespace Lucene.Net.Analysis.Ngram
         [Test]
         public virtual void TestBackRangeOfNgrams()
         {
+#pragma warning disable 612, 618
             Tokenizer tokenizer = new Lucene43EdgeNGramTokenizer(Version.LUCENE_43, input, Lucene43EdgeNGramTokenizer.Side.BACK, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "e", "de", "cde" }, new int[] { 4, 3, 2 }, new int[] { 5, 5, 5 }, null, null, null, 5, false); // abcde
         }
 
@@ -178,7 +182,9 @@ namespace Lucene.Net.Analysis.Ngram
 
             public override TokenStreamComponents CreateComponents(string fieldName, Reader reader)
             {
+#pragma warning disable 612, 618
                 Tokenizer tokenizer = new Lucene43EdgeNGramTokenizer(Version.LUCENE_43, reader, Lucene43EdgeNGramTokenizer.Side.BACK, 2, 4);
+#pragma warning restore 612, 618
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
         }
@@ -186,7 +192,9 @@ namespace Lucene.Net.Analysis.Ngram
         [Test]
         public virtual void TestTokenizerPositions()
         {
+#pragma warning disable 612, 618
             Tokenizer tokenizer = new Lucene43EdgeNGramTokenizer(Version.LUCENE_43, input, Lucene43EdgeNGramTokenizer.Side.FRONT, 1, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(tokenizer, new string[] { "a", "ab", "abc" }, new int[] { 0, 0, 0 }, new int[] { 1, 2, 3 }, null, new int[] { 1, 0, 0 }, null, null, false);
 
             tokenizer = new EdgeNGramTokenizer(TEST_VERSION_CURRENT, new StringReader("abcde"), 1, 3);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/NGramTokenFilterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/NGramTokenFilterTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/NGramTokenFilterTest.cs
index e4e8032..b6bf723 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/NGramTokenFilterTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/NGramTokenFilterTest.cs
@@ -213,7 +213,9 @@ namespace Lucene.Net.Analysis.Ngram
         [Test]
         public virtual void TestLucene43()
         {
+#pragma warning disable 612, 618
             NGramTokenFilter filter = new NGramTokenFilter(LuceneVersion.LUCENE_43, input, 2, 3);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(filter, new string[] { "ab", "bc", "cd", "de", "abc", "bcd", "cde" }, new int[] { 0, 1, 2, 3, 0, 1, 2 }, new int[] { 2, 3, 4, 5, 3, 4, 5 }, null, new int[] { 1, 1, 1, 1, 1, 1, 1 }, null, null, false);
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/da2ae4c8/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/TestNGramFilters.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/TestNGramFilters.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/TestNGramFilters.cs
index 5880b12..b21ddc2 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/TestNGramFilters.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Ngram/TestNGramFilters.cs
@@ -103,7 +103,9 @@ namespace Lucene.Net.Analysis.Ngram
         public virtual void TestEdgeNGramTokenizer3()
         {
             Reader reader = new StringReader("ready");
+#pragma warning disable 612, 618
             TokenStream stream = TokenizerFactory("EdgeNGram", LuceneVersion.LUCENE_43, "side", "back").Create(reader);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(stream, new string[] { "y" });
         }
 
@@ -139,7 +141,9 @@ namespace Lucene.Net.Analysis.Ngram
         {
             Reader reader = new StringReader("ready");
             TokenStream stream = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+#pragma warning disable 612, 618
             stream = TokenFilterFactory("EdgeNGram", LuceneVersion.LUCENE_43, "side", "back").Create(stream);
+#pragma warning restore 612, 618
             AssertTokenStreamContents(stream, new string[] { "y" });
         }