Posted to commits@lucene.apache.org by mi...@apache.org on 2014/01/17 18:23:44 UTC

svn commit: r1559196 [15/19] - in /lucene/dev/branches/lucene5376: ./ dev-tools/ dev-tools/idea/solr/contrib/morphlines-cell/ dev-tools/maven/lucene/facet/ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/common/src/java/org/apache/luce...

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/Token.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/Token.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/Token.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/Token.java Fri Jan 17 17:23:33 2014
@@ -128,4 +128,4 @@ public class Token implements java.io.Se
   }
 
 }
-/* JavaCC - OriginalChecksum=f2df701e24da1cf2d025118ce6efdd2f (do not edit this line) */
+/* JavaCC - OriginalChecksum=db38f23b3674db52ff034369707a0ac3 (do not edit this line) */

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/TokenMgrError.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/TokenMgrError.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/TokenMgrError.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/TokenMgrError.java Fri Jan 17 17:23:33 2014
@@ -48,7 +48,7 @@ public class TokenMgrError extends Error
    * equivalents in the given string
    */
   protected static final String addEscapes(String str) {
-    StringBuffer retval = new StringBuffer();
+    StringBuilder retval = new StringBuilder();
     char ch;
     for (int i = 0; i < str.length(); i++) {
       switch (str.charAt(i))
@@ -144,4 +144,4 @@ public class TokenMgrError extends Error
     this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
   }
 }
-/* JavaCC - OriginalChecksum=8c69a370d9a9893140562c8bb911678c (do not edit this line) */
+/* JavaCC - OriginalChecksum=dcdd5ccde13b91bcd8f76a86ca618852 (do not edit this line) */

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java Fri Jan 17 17:23:33 2014
@@ -253,8 +253,8 @@ public class TestAnalyzingQueryParser ex
 
   final static class ASCIIAnalyzer extends Analyzer {
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(result, new FoldingFilter(result));
     }
   }
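
The analyzer hunks in this commit all apply the same API migration: Analyzer.createComponents no longer receives a Reader, and Tokenizer subclasses are constructed without one, because the framework now hands the Reader to the tokenizer via Tokenizer.setReader() before the stream is consumed. A minimal sketch of an analyzer written against the reader-less API (the class name and the use of Version.LUCENE_CURRENT are illustrative assumptions):

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.core.LowerCaseFilter;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.util.Version;

    /** Hypothetical analyzer showing the reader-less createComponents signature. */
    class SimpleWhitespaceAnalyzer extends Analyzer {
      private static final Version MATCH_VERSION = Version.LUCENE_CURRENT; // assumption

      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        // No Reader here: the indexing/query machinery calls setReader() on the
        // tokenizer later, so it is constructed "empty".
        Tokenizer source = new WhitespaceTokenizer(MATCH_VERSION);
        return new TokenStreamComponents(source, new LowerCaseFilter(MATCH_VERSION, source));
      }
    }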

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiAnalyzer.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiAnalyzer.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiAnalyzer.java Fri Jan 17 17:23:33 2014
@@ -125,8 +125,8 @@ public class TestMultiAnalyzer extends B
   private class MultiAnalyzer extends Analyzer {
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(result, new TestFilter(result));
     }
   }
@@ -196,8 +196,8 @@ public class TestMultiAnalyzer extends B
   private class PosIncrementAnalyzer extends Analyzer {
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(result, new TestPosIncrementFilter(result));
     }
   }

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java Fri Jan 17 17:23:33 2014
@@ -327,8 +327,8 @@ public class TestMultiFieldQueryParser e
     }
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return stdAnalyzer.createComponents(fieldName, reader);
+    public TokenStreamComponents createComponents(String fieldName) {
+      return stdAnalyzer.createComponents(fieldName);
     }
   }
   

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiPhraseQueryParsing.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiPhraseQueryParsing.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiPhraseQueryParsing.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiPhraseQueryParsing.java Fri Jan 17 17:23:33 2014
@@ -48,8 +48,8 @@ public class TestMultiPhraseQueryParsing
     }
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return new TokenStreamComponents(new CannedTokenizer(reader, tokens));
+    public TokenStreamComponents createComponents(String fieldName) {
+      return new TokenStreamComponents(new CannedTokenizer(tokens));
     }
   }
 
@@ -60,8 +60,8 @@ public class TestMultiPhraseQueryParsing
     private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
     private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
 
-    public CannedTokenizer(Reader reader, TokenAndPos[] tokens) {
-      super(reader);
+    public CannedTokenizer(TokenAndPos[] tokens) {
+      super();
       this.tokens = tokens;
     }
 

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java Fri Jan 17 17:23:33 2014
@@ -153,6 +153,7 @@ public class TestQueryParser extends Que
   //
   // This test is here as a safety, in case that ant step
   // doesn't work for some reason.
+  @SuppressWarnings("rawtypes")
   public void testProtectedCtors() throws Exception {
     try {
       QueryParser.class.getConstructor(new Class[] {CharStream.class});
@@ -321,8 +322,8 @@ public class TestQueryParser extends Que
   /** adds synonym of "dog" for "dogs". */
   static class MockSynonymAnalyzer extends Analyzer {
     @Override
-    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      MockTokenizer tokenizer = new MockTokenizer(reader);
+    protected TokenStreamComponents createComponents(String fieldName) {
+      MockTokenizer tokenizer = new MockTokenizer();
       return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
     }
   }
@@ -391,8 +392,8 @@ public class TestQueryParser extends Que
   
   static class MockCJKSynonymAnalyzer extends Analyzer {
     @Override
-    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new SimpleCJKTokenizer(reader);
+    protected TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new SimpleCJKTokenizer();
       return new TokenStreamComponents(tokenizer, new MockCJKSynonymFilter(tokenizer));
     }
   }

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java Fri Jan 17 17:23:33 2014
@@ -130,8 +130,8 @@ public class TestPrecedenceQueryParser e
 
     /** Filters MockTokenizer with StopFilter. */
     @Override
-    public final TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+    public final TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
       return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
     }
   }

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiAnalyzerQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiAnalyzerQPHelper.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiAnalyzerQPHelper.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiAnalyzerQPHelper.java Fri Jan 17 17:23:33 2014
@@ -146,8 +146,8 @@ public class TestMultiAnalyzerQPHelper e
   private class MultiAnalyzer extends Analyzer {
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(result, new TestFilter(result));
     }
   }
@@ -213,8 +213,8 @@ public class TestMultiAnalyzerQPHelper e
   private class PosIncrementAnalyzer extends Analyzer {
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(result, new TestPosIncrementFilter(result));
     }
   }

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java Fri Jan 17 17:23:33 2014
@@ -363,8 +363,8 @@ public class TestMultiFieldQPHelper exte
     }
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return stdAnalyzer.createComponents(fieldName, reader);
+    public TokenStreamComponents createComponents(String fieldName) {
+      return stdAnalyzer.createComponents(fieldName);
     }
   }
 

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java Fri Jan 17 17:23:33 2014
@@ -147,8 +147,8 @@ public class TestQPHelper extends Lucene
 
     /** Filters MockTokenizer with StopFilter. */
     @Override
-    public final TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+    public final TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
       return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
     }
   }
@@ -345,8 +345,8 @@ public class TestQPHelper extends Lucene
   private class SimpleCJKTokenizer extends Tokenizer {
     private CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
 
-    public SimpleCJKTokenizer(Reader input) {
-      super(input);
+    public SimpleCJKTokenizer() {
+      super();
     }
 
     @Override
@@ -362,8 +362,8 @@ public class TestQPHelper extends Lucene
 
   private class SimpleCJKAnalyzer extends Analyzer {
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
+    public TokenStreamComponents createComponents(String fieldName) {
+      return new TokenStreamComponents(new SimpleCJKTokenizer());
     }
   }
   
@@ -1267,8 +1267,8 @@ public class TestQPHelper extends Lucene
     private final PositionIncrementAttribute posIncr = addAttribute(PositionIncrementAttribute.class);
     private final CharTermAttribute term = addAttribute(CharTermAttribute.class);
 
-    public CannedTokenizer(Reader reader) {
-      super(reader);
+    public CannedTokenizer() {
+      super();
     }
 
     @Override
@@ -1303,8 +1303,8 @@ public class TestQPHelper extends Lucene
 
   private class CannedAnalyzer extends Analyzer {
     @Override
-    public TokenStreamComponents createComponents(String ignored, Reader alsoIgnored) {
-      return new TokenStreamComponents(new CannedTokenizer(alsoIgnored));
+    public TokenStreamComponents createComponents(String ignored) {
+      return new TokenStreamComponents(new CannedTokenizer());
     }
   }
 

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java Fri Jan 17 17:23:33 2014
@@ -118,10 +118,8 @@ public class TestStandardQP extends Quer
     // TODO implement LUCENE-2566 and remove this (override)method
     Analyzer a = new Analyzer() {
       @Override
-      public TokenStreamComponents createComponents(String fieldName,
-          Reader reader) {
-        return new TokenStreamComponents(new MockTokenizer(reader,
-            MockTokenizer.WHITESPACE, false));
+      public TokenStreamComponents createComponents(String fieldName) {
+        return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, false));
       }
     };
     assertQueryEquals("a - b", a, "a -b");

Modified: lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java (original)
+++ lucene/dev/branches/lucene5376/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java Fri Jan 17 17:23:33 2014
@@ -118,8 +118,8 @@ public abstract class QueryParserTestBas
 
     /** Filters MockTokenizer with StopFilter. */
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
       return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
     }
   }
@@ -250,8 +250,8 @@ public abstract class QueryParserTestBas
   protected static class SimpleCJKTokenizer extends Tokenizer {
     private CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
 
-    public SimpleCJKTokenizer(Reader input) {
-      super(input);
+    public SimpleCJKTokenizer() {
+      super();
     }
 
     @Override
@@ -267,8 +267,8 @@ public abstract class QueryParserTestBas
 
   private class SimpleCJKAnalyzer extends Analyzer {
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
+    public TokenStreamComponents createComponents(String fieldName) {
+      return new TokenStreamComponents(new SimpleCJKTokenizer());
     }
   }
 
@@ -403,8 +403,8 @@ public abstract class QueryParserTestBas
     // +,-,! should be directly adjacent to operand (i.e. not separated by whitespace) to be treated as an operator
     Analyzer a = new Analyzer() {
       @Override
-      public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+      public TokenStreamComponents createComponents(String fieldName) {
+        return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, false));
       }
     };
     assertQueryEquals("a - b", a, "a - b");
@@ -558,6 +558,13 @@ public abstract class QueryParserTestBas
     assertQueryEquals("((stop))", qpAnalyzer, "");
     assertTrue(getQuery("term term term", qpAnalyzer) instanceof BooleanQuery);
     assertTrue(getQuery("term +stop", qpAnalyzer) instanceof TermQuery);
+    
+    CommonQueryParserConfiguration cqpc = getParserConfig(qpAnalyzer);
+    setDefaultOperatorAND(cqpc);
+    assertQueryEquals(cqpc, "field", "term phrase term",
+        "+term +(+phrase1 +phrase2) +term");
+    assertQueryEquals(cqpc, "field", "phrase",
+        "+phrase1 +phrase2");
   }
 
   public void testRange() throws Exception {
@@ -1133,8 +1140,8 @@ public abstract class QueryParserTestBas
       super();
     }
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
     }
   }
@@ -1145,8 +1152,8 @@ public abstract class QueryParserTestBas
       super();
     }
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+    public TokenStreamComponents createComponents(String fieldName) {
+      return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, true));
     }
   }
   
@@ -1176,8 +1183,8 @@ public abstract class QueryParserTestBas
   }
   private class MockCollationAnalyzer extends Analyzer {
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(tokenizer, new MockCollationFilter(tokenizer));
     }
   }

Modified: lucene/dev/branches/lucene5376/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java (original)
+++ lucene/dev/branches/lucene5376/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java Fri Jan 17 17:23:33 2014
@@ -44,6 +44,7 @@ public final class SlowCollatedStringCom
   private final String field;
   final Collator collator;
   private String bottom;
+  private String topValue;
   private final BytesRef tempBR = new BytesRef();
 
   public SlowCollatedStringComparator(int numHits, String field, Collator collator) {
@@ -105,6 +106,11 @@ public final class SlowCollatedStringCom
   }
 
   @Override
+  public void setTopValue(final String value) {
+    this.topValue = value;
+  }
+
+  @Override
   public String value(int slot) {
     return values[slot];
   }
@@ -124,7 +130,7 @@ public final class SlowCollatedStringCom
   }
 
   @Override
-  public int compareDocToValue(int doc, String value) {
+  public int compareTop(int doc) {
     currentDocTerms.get(doc, tempBR);
     final String docValue;
     if (tempBR.length == 0 && docsWithField.get(doc) == false) {
@@ -132,6 +138,6 @@ public final class SlowCollatedStringCom
     } else {
       docValue = tempBR.utf8ToString();
     }
-    return compareValues(docValue, value);
+    return compareValues(topValue, docValue);
   }
 }

Modified: lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/IndexState.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/IndexState.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/IndexState.java (original)
+++ lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/IndexState.java Fri Jan 17 17:23:33 2014
@@ -49,10 +49,10 @@ import org.apache.lucene.analysis.Analyz
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.expressions.Bindings;
-import org.apache.lucene.facet.CachedOrdinalsReader;
-import org.apache.lucene.facet.DocValuesOrdinalsReader;
 import org.apache.lucene.facet.FacetsConfig;
-import org.apache.lucene.facet.OrdinalsReader;
+import org.apache.lucene.facet.taxonomy.CachedOrdinalsReader;
+import org.apache.lucene.facet.taxonomy.DocValuesOrdinalsReader;
+import org.apache.lucene.facet.taxonomy.OrdinalsReader;
 import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager.SearcherAndTaxonomy;
 import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;

Modified: lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/RegisterFieldHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/RegisterFieldHandler.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/RegisterFieldHandler.java (original)
+++ lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/RegisterFieldHandler.java Fri Jan 17 17:23:33 2014
@@ -455,9 +455,12 @@ public class RegisterFieldHandler extend
     }      
 
     if (type.equals("text")) {
+      if (sorted) {
+        f.fail("sort", "cannot sort text fields; use atom instead");
+      }
       ft.setIndexed(true);
       ft.setTokenized(true);
-      if (sorted || grouped) {
+      if (grouped) {
         ft.setDocValueType(DocValuesType.SORTED);
       } else if (dv) {
         ft.setDocValueType(DocValuesType.BINARY);
@@ -717,7 +720,7 @@ public class RegisterFieldHandler extend
    *  fields, solely for .getOffsetGap I think. */
   public final static Analyzer dummyAnalyzer = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
+      protected TokenStreamComponents createComponents(String fieldName) {
         throw new UnsupportedOperationException();
       }
     };
@@ -732,7 +735,7 @@ public class RegisterFieldHandler extend
 
   final static Pattern COMMENTS_PATTERN = Pattern.compile("#.*$", Pattern.MULTILINE);
 
-  static TokenStreamComponents buildCustomAnalysisChain(Version matchVersion, Request chain, Reader reader) {
+  static TokenStreamComponents buildCustomAnalysisChain(Version matchVersion, Request chain) {
 
     Request t = chain.getStruct("tokenizer");
 
@@ -743,10 +746,10 @@ public class RegisterFieldHandler extend
     Tokenizer tokenizer;
     // nocommit use analysis factories
     if (pr.name.equals("StandardTokenizer")) {
-      tokenizer = new StandardTokenizer(matchVersion, reader);
+      tokenizer = new StandardTokenizer(matchVersion);
       ((StandardTokenizer) tokenizer).setMaxTokenLength(pr.r.getInt("maxTokenLength"));
     } else if (pr.name.equals("WhitespaceTokenizer")) {
-      tokenizer = new WhitespaceTokenizer(matchVersion, reader);
+      tokenizer = new WhitespaceTokenizer(matchVersion);
     } else if (pr.name.equals("PatternTokenizer")) {
       Pattern p;
       try {
@@ -756,7 +759,7 @@ public class RegisterFieldHandler extend
         // Dead code but compiler disagrees:
         p = null;
       }
-      tokenizer = new PatternTokenizer(reader, p, pr.r.getInt("group"));
+      tokenizer = new PatternTokenizer(p, pr.r.getInt("group"));
     } else if (pr.name.equals("ICUTokenizer")) {
       final BreakIterator breakers[];
       if (pr.r.hasParam("rules")) {
@@ -799,7 +802,7 @@ public class RegisterFieldHandler extend
         // TODO: we could also allow codes->types mapping
       };
 
-      tokenizer = new ICUTokenizer(reader, config);
+      tokenizer = new ICUTokenizer(config);
 
     } else {
       // BUG
@@ -891,7 +894,7 @@ public class RegisterFieldHandler extend
     }
 
     @Override
-    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
+    protected TokenStreamComponents createComponents(String fieldName) {
       JSONObject o;
       try {
         o = (JSONObject) JSONValue.parseStrict(json);
@@ -903,8 +906,7 @@ public class RegisterFieldHandler extend
       positionIncrementGap = r.getInt("positionIncrementGap");
       offsetGap = r.getInt("offsetGap");
       return buildCustomAnalysisChain(matchVersion,
-                                      r,
-                                      reader);
+                                      r);
     }
 
     @Override
@@ -1022,7 +1024,7 @@ public class RegisterFieldHandler extend
         a.getInt("positionIncrementGap");
         a.getInt("offsetGap");
         // Ensures the args are all correct:
-        buildCustomAnalysisChain(matchVersion, a, new StringReader(""));
+        buildCustomAnalysisChain(matchVersion, a);
         analyzer = new CustomAnalyzer(matchVersion, jsonOrig);
       } else {
         f.fail(name, "either class or tokenizer/tokenFilters are required");

Modified: lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/SearchHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/SearchHandler.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/SearchHandler.java (original)
+++ lucene/dev/branches/lucene5376/lucene/server/src/java/org/apache/lucene/server/handlers/SearchHandler.java Fri Jan 17 17:23:33 2014
@@ -143,6 +143,7 @@ public class SearchHandler extends Handl
 
   private final static Type SORT_TYPE = new ListType(
                                             new StructType(new Param("field", "The field to sort on.  Pass <code>docid</code> for index order and <code>score</code> for relevance sort.", new StringType()),
+                                                           new Param("missingLast", "Whether missing values should sort last instead of first.  Note that this runs \"before\" reverse, so if you sort missing first and reverse=true then missing values will be at the end.", new BooleanType(), false),
                                                            new Param("reverse", "Sort in reverse of the field's natural order", new BooleanType(), false)));
 
   private final static Type BOOLEAN_OCCUR_TYPE = new EnumType("must", "Clause is required.",
@@ -722,6 +723,28 @@ public class SearchHandler extends Handl
                              sortType,
                              sub.getBoolean("reverse"));
         }
+        
+        boolean hasMissingLast = sub.hasParam("missingLast");
+
+        boolean missingLast = sub.getBoolean("missingLast");
+
+        if (sf.getType() == SortField.Type.STRING) {
+          if (missingLast) {
+            sf.setMissingValue(SortField.STRING_LAST);
+          } else {
+            sf.setMissingValue(SortField.STRING_FIRST);
+          }
+        } else if (sf.getType() == SortField.Type.INT) {
+          sf.setMissingValue(missingLast ? Integer.MAX_VALUE : Integer.MIN_VALUE);
+        } else if (sf.getType() == SortField.Type.LONG) {
+          sf.setMissingValue(missingLast ? Long.MAX_VALUE : Long.MIN_VALUE);
+        } else if (sf.getType() == SortField.Type.FLOAT) {
+          sf.setMissingValue(missingLast ? Float.POSITIVE_INFINITY : Float.NEGATIVE_INFINITY);
+        } else if (sf.getType() == SortField.Type.DOUBLE) {
+          sf.setMissingValue(missingLast ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY);
+        } else if (hasMissingLast) {
+          sub.fail("missingLast", "can only specify missingLast for string and numeric field types");
+        }
       }
       sortFields.add(sf);
     }
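
The hunk above maps the new missingLast flag onto SortField.setMissingValue: SortField.STRING_LAST or STRING_FIRST for string sorts, and the numeric extremes for int, long, float and double. A standalone sketch of the same calls (field names are hypothetical):

    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    class MissingLastSortSketch {
      /** Builds a sort where documents missing either field land at the end. */
      static Sort missingLast() {
        SortField byAtom = new SortField("atom", SortField.Type.STRING);
        byAtom.setMissingValue(SortField.STRING_LAST);   // missing strings sort last

        SortField byCount = new SortField("count", SortField.Type.INT);
        byCount.setMissingValue(Integer.MAX_VALUE);      // missing ints sort last

        return new Sort(byAtom, byCount);
      }
    }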

Modified: lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/ServerBaseTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/ServerBaseTestCase.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/ServerBaseTestCase.java (original)
+++ lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/ServerBaseTestCase.java Fri Jan 17 17:23:33 2014
@@ -58,6 +58,8 @@ public abstract class ServerBaseTestCase
   protected static boolean useDefaultIndex = true;
   
   protected static File STATE_DIR;
+
+  protected static JSONObject lastResult;
   
   @BeforeClass
   public static void beforeClassServerBase() throws Exception {
@@ -70,6 +72,7 @@ public abstract class ServerBaseTestCase
     // who sets this? netty? what a piece of crap
     System.clearProperty("sun.nio.ch.bugLevel");
     STATE_DIR = null;
+    lastResult = null;
   }
 
   @Override
@@ -250,13 +253,13 @@ public abstract class ServerBaseTestCase
       System.out.println("\nNOTE: ServerBaseTestCase: sendRaw command=" + command + " args:\n" + args.toJSONString(new JSONStyleIdent()));
     }
 
-    JSONObject result = sendRaw(command, args.toJSONString(JSONStyle.NO_COMPRESS));
+    lastResult = sendRaw(command, args.toJSONString(JSONStyle.NO_COMPRESS));
 
     if (VERBOSE) {
-      System.out.println("NOTE: ServerBaseTestCase: server response:\n" + result.toJSONString(new JSONStyleIdent()));
+      System.out.println("NOTE: ServerBaseTestCase: server response:\n" + lastResult.toJSONString(new JSONStyleIdent()));
     }
 
-    return result;
+    return lastResult;
   }
 
   protected static JSONObject sendRaw(String command, String body) throws Exception {
@@ -415,30 +418,58 @@ public abstract class ServerBaseTestCase
     return (String) get(o, path);
   }
 
+  protected String getString(String path) {
+    return getString(lastResult, path);
+  }
+
   protected int getInt(Object o, String path) {
     return ((Number) get(o, path)).intValue();
   }
 
+  protected int getInt(String path) {
+    return getInt(lastResult, path);
+  }
+
   protected boolean getBoolean(Object o, String path) {
     return ((Boolean) get(o, path)).booleanValue();
   }
 
+  protected boolean getBoolean(String path) {
+    return getBoolean(lastResult, path);
+  }
+
   protected long getLong(Object o, String path) {
     return ((Number) get(o, path)).longValue();
   }
 
+  protected long getLong(String path) {
+    return getLong(lastResult, path);
+  }
+
   protected float getFloat(Object o, String path) {
     return ((Number) get(o, path)).floatValue();
   }
 
+  protected float getFloat(String path) {
+    return getFloat(lastResult, path);
+  }
+
   protected JSONObject getObject(Object o, String path) {
     return (JSONObject) get(o, path);
   }
 
+  protected JSONObject getObject(String path) {
+    return getObject(lastResult, path);
+  }
+
   protected JSONArray getArray(Object o, String path) {
     return (JSONArray) get(o, path);
   }
 
+  protected JSONArray getArray(String path) {
+    return getArray(lastResult, path);
+  }
+
   protected JSONArray getArray(JSONArray o, int index) {
     return (JSONArray) o.get(index);
   }
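
send() now records the parsed response in lastResult, so the new single-argument getters can read the most recent response by path without keeping the returned JSONObject. A minimal sketch of that usage inside a hypothetical ServerBaseTestCase subclass (the request body and expected values are illustrative):

    public void testLastResultConvenience() throws Exception {
      // send() stores the parsed response in lastResult, so the path-based
      // getters below read from it directly. The asserted values assume a
      // single matching document with id 0 was indexed beforehand.
      send("search", "{query: MatchAllDocsQuery, topHits: 1, retrieveFields: [id]}");
      assertEquals(1, getInt("totalHits"));
      assertEquals(0, getInt("hits[0].fields.id"));
    }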

Added: lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/TestSort.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/TestSort.java?rev=1559196&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/TestSort.java (added)
+++ lucene/dev/branches/lucene5376/lucene/server/src/test/org/apache/lucene/server/TestSort.java Fri Jan 17 17:23:33 2014
@@ -0,0 +1,150 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Set;
+
+import org.apache.lucene.util._TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONObject;
+
+public class TestSort extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "atom", "{type: atom, sort: true}");
+    put(o, "int", "{type: int, sort: true}");
+    put(o, "float", "{type: float, sort: true}");
+    put(o, "long", "{type: long, sort: true}");
+    put(o, "double", "{type: double, sort: true}");
+    put(o, "text", "{type: text, analyzer: WhitespaceAnalyzer}");
+    put(o, "id", "{type: int, store: true}");
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    send("registerFields", o2);
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  public void testMissingLastAtom() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, atom: a}}");
+    send("addDocument", "{fields: {id: 1, atom: b}}");
+    // field is missing:
+    send("addDocument", "{fields: {id: 2}}");
+    long gen = getLong("indexGen");
+
+    verifySort("atom");
+  }
+
+  public void testMissingLastInt() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, int: -7}}");
+    send("addDocument", "{fields: {id: 1, int: 7}}");
+    // field is missing:
+    send("addDocument", "{fields: {id: 2}}");
+    verifySort("int");
+  }
+
+  public void testMissingLastLong() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, long: -7}}");
+    send("addDocument", "{fields: {id: 1, long: 7}}");
+    // field is missing:
+    send("addDocument", "{fields: {id: 2}}");
+    verifySort("long");
+  }
+
+  public void testMissingLastFloat() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, float: -7}}");
+    send("addDocument", "{fields: {id: 1, float: 7}}");
+    // field is missing:
+    send("addDocument", "{fields: {id: 2}}");
+    verifySort("float");
+  }
+
+  public void testMissingLastDouble() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, double: -7}}");
+    send("addDocument", "{fields: {id: 1, double: 7}}");
+    // field is missing:
+    send("addDocument", "{fields: {id: 2}}");
+    verifySort("double");
+  }
+
+  public void testNoSortOnText() throws Exception {
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: text, sort: true, analyzer: WhitespaceAnalyzer}}}",
+                    "registerFields > fields > bad > sort: cannot sort text fields; use atom instead");
+  }
+
+  private void verifySort(String field) throws Exception {
+
+    long gen = getLong("indexGen");
+
+    // missing is (annoyingly) first by default:
+    send("search",
+         "{query: MatchAllDocsQuery, topHits: 3, retrieveFields: [id], searcher: {indexGen: " + gen + "}, sort: {fields: [{field: " + field + "}]}}");
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(2, getInt("hits[0].fields.id"));
+    assertEquals(0, getInt("hits[1].fields.id"));
+    assertEquals(1, getInt("hits[2].fields.id"));
+
+    // reverse, missing is (annoyingly) first by default:
+    send("search",
+         "{query: MatchAllDocsQuery, topHits: 3, retrieveFields: [id], searcher: {indexGen: " + gen + "}, sort: {fields: [{field: " + field + ", reverse: true}]}}");
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(1, getInt("hits[0].fields.id"));
+    assertEquals(0, getInt("hits[1].fields.id"));
+    assertEquals(2, getInt("hits[2].fields.id"));
+
+    // missing last:
+    send("search",
+         "{query: MatchAllDocsQuery, topHits: 3, retrieveFields: [id], searcher: {indexGen: " + gen + "}, sort: {fields: [{field: " + field + ", missingLast: true}]}}");
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(0, getInt("hits[0].fields.id"));
+    assertEquals(1, getInt("hits[1].fields.id"));
+    assertEquals(2, getInt("hits[2].fields.id"));
+
+    // reverse, missing last:
+    send("search",
+         "{query: MatchAllDocsQuery, topHits: 3, retrieveFields: [id], searcher: {indexGen: " + gen + "}, sort: {fields: [{field: " + field + ", reverse: true, missingLast: true}]}}");
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(2, getInt("hits[0].fields.id"));
+    assertEquals(1, getInt("hits[1].fields.id"));
+    assertEquals(0, getInt("hits[2].fields.id"));
+  }
+}

Modified: lucene/dev/branches/lucene5376/lucene/site/changes/changes2html.pl
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/site/changes/changes2html.pl?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/site/changes/changes2html.pl (original)
+++ lucene/dev/branches/lucene5376/lucene/site/changes/changes2html.pl Fri Jan 17 17:23:33 2014
@@ -27,6 +27,7 @@ use warnings;
 # JIRA REST API documentation: <http://docs.atlassian.com/jira/REST/latest/>
 my $project_info_url = 'https://issues.apache.org/jira/rest/api/2/project';
 my $jira_url_prefix = 'http://issues.apache.org/jira/browse/';
+my $github_pull_request_prefix = 'https://github.com/apache/lucene-solr/pull/';
 my $bugzilla_url_prefix = 'http://issues.apache.org/bugzilla/show_bug.cgi?id=';
 my $month_regex = &setup_month_regex;
 my %month_nums = &setup_month_nums;
@@ -554,6 +555,9 @@ for my $rel (@releases) {
       # Link Lucene XXX, SOLR XXX and INFRA XXX to JIRA
       $item =~ s{((LUCENE|SOLR|INFRA)\s+(\d{3,}))}
                 {<a href="${jira_url_prefix}\U$2\E-$3">$1</a>}gi;
+      # Link "[ github | gh ] pull request [ # ] X+" to Github pull request
+      $item =~ s{((?:(?:(?:github|gh)\s+)?pull\s+request\s*(?:\#?\s*)?|gh-)(\d+))}
+                {<a href="${github_pull_request_prefix}$2">$1</a>}gi;
       if ($product eq 'LUCENE') {
         # Find single Bugzilla issues
         $item =~ s~((?i:bug|patch|issue)\s*\#?\s*(\d+))

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java Fri Jan 17 17:23:33 2014
@@ -111,10 +111,14 @@ public class AnalyzingInfixSuggester ext
   /** {@link IndexSearcher} used for lookups. */
   protected IndexSearcher searcher;
 
-  /** null if payloads were not indexed: */
-  private BinaryDocValues payloadsDV;
-  private BinaryDocValues textDV;
-  private NumericDocValues weightsDV;
+  /** DocValuesField holding the payloads; null if payloads were not indexed. */
+  protected BinaryDocValues payloadsDV;
+
+  /** DocValuesField holding each suggestion's text. */
+  protected BinaryDocValues textDV;
+
+  /** DocValuesField holding each suggestion's weight. */
+  protected NumericDocValues weightsDV;
 
   /** Default minimum number of leading characters before
    *  PrefixQuery is used (4). */
@@ -214,9 +218,7 @@ public class AnalyzingInfixSuggester ext
                           getIndexWriterConfig(matchVersion, gramAnalyzer));
       BytesRef text;
       Document doc = new Document();
-      FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-      ft.setIndexOptions(IndexOptions.DOCS_ONLY);
-      ft.setOmitNorms(true);
+      FieldType ft = getTextFieldType();
       Field textField = new Field(TEXT_FIELD_NAME, "", ft);
       doc.add(textField);
 
@@ -314,6 +316,18 @@ public class AnalyzingInfixSuggester ext
     }
   }
 
+  /**
+   * Subclass can override this method to change the field type of the text field
+   * e.g. to change the index options.
+   */
+  protected FieldType getTextFieldType() {
+    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+    ft.setIndexOptions(IndexOptions.DOCS_ONLY);
+    ft.setOmitNorms(true);
+
+    return ft;
+  }
+
   @Override
   public List<LookupResult> lookup(CharSequence key, boolean onlyMorePopular, int num) {
     return lookup(key, num, true, true);
@@ -416,40 +430,58 @@ public class AnalyzingInfixSuggester ext
       // Slower way if postings are not pre-sorted by weight:
       // hits = searcher.search(query, null, num, new Sort(new SortField("weight", SortField.Type.LONG, true)));
 
-      List<LookupResult> results = new ArrayList<LookupResult>();
-      BytesRef scratch = new BytesRef();
-      for (int i=0;i<hits.scoreDocs.length;i++) {
-        ScoreDoc sd = hits.scoreDocs[i];
-        textDV.get(sd.doc, scratch);
-        String text = scratch.utf8ToString();
-        long score = weightsDV.get(sd.doc);
-
-        BytesRef payload;
-        if (payloadsDV != null) {
-          payload = new BytesRef();
-          payloadsDV.get(sd.doc, payload);
-        } else {
-          payload = null;
-        }
-
-        LookupResult result;
+      List<LookupResult> results = createResults(hits, num, key, doHighlight, matchedTokens, prefixToken);
 
-        if (doHighlight) {
-          Object highlightKey = highlight(text, matchedTokens, prefixToken);
-          result = new LookupResult(highlightKey.toString(), highlightKey, score, payload);
-        } else {
-          result = new LookupResult(text, score, payload);
-        }
-        results.add(result);
-      }
       //System.out.println((System.currentTimeMillis() - t0) + " msec for infix suggest");
       //System.out.println(results);
+
       return results;
+
     } catch (IOException ioe) {
       throw new RuntimeException(ioe);
     }
   }
 
+  /**
+   * Create the results based on the search hits.
+   * Can be overridden by subclass to add particular behavior (e.g. weight transformation)
+   * @throws IOException If there are problems reading fields from the underlying Lucene index.
+   */
+  protected List<LookupResult> createResults(TopDocs hits, int num, CharSequence charSequence,
+                                             boolean doHighlight, Set<String> matchedTokens, String prefixToken)
+      throws IOException {
+
+    List<LookupResult> results = new ArrayList<LookupResult>();
+    BytesRef scratch = new BytesRef();
+    for (int i=0;i<hits.scoreDocs.length;i++) {
+      ScoreDoc sd = hits.scoreDocs[i];
+      textDV.get(sd.doc, scratch);
+      String text = scratch.utf8ToString();
+      long score = weightsDV.get(sd.doc);
+
+      BytesRef payload;
+      if (payloadsDV != null) {
+        payload = new BytesRef();
+        payloadsDV.get(sd.doc, payload);
+      } else {
+        payload = null;
+      }
+
+      LookupResult result;
+
+      if (doHighlight) {
+        Object highlightKey = highlight(text, matchedTokens, prefixToken);
+        result = new LookupResult(highlightKey.toString(), highlightKey, score, payload);
+      } else {
+        result = new LookupResult(text, score, payload);
+      }
+
+      results.add(result);
+    }
+
+    return results;
+  }
+
   /** Subclass can override this to tweak the Query before
    *  searching. */
   protected Query finishQuery(BooleanQuery in, boolean allTermsRequired) {
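
getTextFieldType() and createResults() are now protected extension points, so a subclass can change how the suggestion text is indexed or how search hits are turned into LookupResults. A minimal sketch of overriding getTextFieldType() to also index term frequencies; the constructor signature is assumed to be the File-based one used at this point in the branch:

    import java.io.File;
    import java.io.IOException;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.FieldInfo.IndexOptions;
    import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
    import org.apache.lucene.util.Version;

    /** Hypothetical subclass: index the suggestion text with freqs instead of DOCS_ONLY. */
    class FreqsInfixSuggester extends AnalyzingInfixSuggester {
      public FreqsInfixSuggester(Version matchVersion, File indexPath, Analyzer analyzer) throws IOException {
        super(matchVersion, indexPath, analyzer); // constructor signature assumed
      }

      @Override
      protected FieldType getTextFieldType() {
        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS); // default above is DOCS_ONLY
        ft.setOmitNorms(true);
        return ft;
      }
    }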

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellTernarySearchTrie.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellTernarySearchTrie.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellTernarySearchTrie.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/java/org/apache/lucene/search/suggest/jaspell/JaspellTernarySearchTrie.java Fri Jan 17 17:23:33 2014
@@ -419,7 +419,7 @@ public class JaspellTernarySearchTrie {
    *@return The <code>String</code> that indexes the node argument.
    */
   protected String getKey(TSTNode node) {
-    StringBuffer getKeyBuffer = new StringBuffer();
+    StringBuilder getKeyBuffer = new StringBuilder();
     getKeyBuffer.setLength(0);
     getKeyBuffer.append("" + node.splitchar);
     TSTNode currentNode;

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java Fri Jan 17 17:23:33 2014
@@ -397,8 +397,8 @@ public class AnalyzingInfixSuggesterTest
     final CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "a");
     Analyzer indexAnalyzer = new Analyzer() {
         @Override
-        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-          MockTokenizer tokens = new MockTokenizer(reader);
+        protected TokenStreamComponents createComponents(String fieldName) {
+          MockTokenizer tokens = new MockTokenizer();
           return new TokenStreamComponents(tokens,
                                            new StopFilter(TEST_VERSION_CURRENT, tokens, stopWords));
         }
@@ -406,8 +406,8 @@ public class AnalyzingInfixSuggesterTest
 
     Analyzer queryAnalyzer = new Analyzer() {
         @Override
-        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-          MockTokenizer tokens = new MockTokenizer(reader);
+        protected TokenStreamComponents createComponents(String fieldName) {
+          MockTokenizer tokens = new MockTokenizer();
           return new TokenStreamComponents(tokens,
                                            new SuggestStopFilter(tokens, stopWords));
         }

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java Fri Jan 17 17:23:33 2014
@@ -273,8 +273,8 @@ public class AnalyzingSuggesterTest exte
 
     final Analyzer analyzer = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
           int tokenStreamCounter = 0;
@@ -346,8 +346,8 @@ public class AnalyzingSuggesterTest exte
 
     final Analyzer analyzer = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
           int tokenStreamCounter = 0;
@@ -424,8 +424,8 @@ public class AnalyzingSuggesterTest exte
   private final Analyzer getUnusualAnalyzer() {
     return new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
 
@@ -631,8 +631,8 @@ public class AnalyzingSuggesterTest exte
     }
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      MockTokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+    public TokenStreamComponents createComponents(String fieldName) {
+      MockTokenizer tokenizer = new MockTokenizer(factory, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
       tokenizer.setEnableChecks(true);
       TokenStream next;
       if (numStopChars != 0) {
@@ -948,8 +948,8 @@ public class AnalyzingSuggesterTest exte
   public void testDupSurfaceFormsMissingResults() throws Exception {
     Analyzer a = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
 
@@ -1007,8 +1007,8 @@ public class AnalyzingSuggesterTest exte
   public void testDupSurfaceFormsMissingResults2() throws Exception {
     Analyzer a = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
 
@@ -1077,8 +1077,8 @@ public class AnalyzingSuggesterTest exte
   public void test0ByteKeys() throws Exception {
     final Analyzer a = new Analyzer() {
         @Override
-        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+        protected TokenStreamComponents createComponents(String fieldName) {
+          Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
           return new TokenStreamComponents(tokenizer) {
             int tokenStreamCounter = 0;
@@ -1147,8 +1147,8 @@ public class AnalyzingSuggesterTest exte
 
     final Analyzer a = new Analyzer() {
         @Override
-        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+        protected TokenStreamComponents createComponents(String fieldName) {
+          Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
           return new TokenStreamComponents(tokenizer) {
             @Override

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/FuzzySuggesterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/FuzzySuggesterTest.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/FuzzySuggesterTest.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/FuzzySuggesterTest.java Fri Jan 17 17:23:33 2014
@@ -227,8 +227,8 @@ public class FuzzySuggesterTest extends 
 
     final Analyzer analyzer = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
           int tokenStreamCounter = 0;
@@ -308,8 +308,8 @@ public class FuzzySuggesterTest extends 
 
     final Analyzer analyzer = new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
           int tokenStreamCounter = 0;
@@ -382,8 +382,8 @@ public class FuzzySuggesterTest extends 
   private final Analyzer getUnusualAnalyzer() {
     return new Analyzer() {
       @Override
-      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
         
         return new TokenStreamComponents(tokenizer) {
 
@@ -579,8 +579,8 @@ public class FuzzySuggesterTest extends 
     }
 
     @Override
-    public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+    public TokenStreamComponents createComponents(String fieldName) {
+      MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
       tokenizer.setEnableChecks(true);
       TokenStream next;
       if (numStopChars != 0) {

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java Fri Jan 17 17:23:33 2014
@@ -231,8 +231,8 @@ public class TestFreeTextSuggester exten
     // Just deletes "of"
     Analyzer a = new Analyzer() {
         @Override
-        public TokenStreamComponents createComponents(String field, Reader reader) {
-          Tokenizer tokenizer = new MockTokenizer(reader);
+        public TokenStreamComponents createComponents(String field) {
+          Tokenizer tokenizer = new MockTokenizer();
           CharArraySet stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "of");
           return new TokenStreamComponents(tokenizer, new StopFilter(TEST_VERSION_CURRENT, tokenizer, stopSet));
         }
@@ -259,8 +259,8 @@ public class TestFreeTextSuggester exten
     // Just deletes "of"
     Analyzer a = new Analyzer() {
         @Override
-        public TokenStreamComponents createComponents(String field, Reader reader) {
-          Tokenizer tokenizer = new MockTokenizer(reader);
+        public TokenStreamComponents createComponents(String field) {
+          Tokenizer tokenizer = new MockTokenizer();
           CharArraySet stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "of");
           return new TokenStreamComponents(tokenizer, new StopFilter(TEST_VERSION_CURRENT, tokenizer, stopSet));
         }

Modified: lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java (original)
+++ lucene/dev/branches/lucene5376/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java Fri Jan 17 17:23:33 2014
@@ -22,6 +22,7 @@ import java.io.StringReader;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.StopFilter;
 import org.apache.lucene.analysis.util.CharArraySet;
 
@@ -29,7 +30,8 @@ public class TestSuggestStopFilter exten
 
   public void testEndNotStopWord() throws Exception {
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to");
-    TokenStream stream = new MockTokenizer(new StringReader("go to"));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go", "to"},
@@ -46,9 +48,9 @@ public class TestSuggestStopFilter exten
   public void testEndIsStopWord() throws Exception {
                               
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to");
-    TokenStream stream = new MockTokenizer(new StringReader("go to "));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
     filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go"},
@@ -65,7 +67,8 @@ public class TestSuggestStopFilter exten
   public void testMidStopWord() throws Exception {
                               
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to");
-    TokenStream stream = new MockTokenizer(new StringReader("go to school"));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
 
     filter = new SuggestStopFilter(stream, stopWords);
@@ -84,7 +87,8 @@ public class TestSuggestStopFilter exten
   public void testMultipleStopWords() throws Exception {
                               
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to", "the", "a");
-    TokenStream stream = new MockTokenizer(new StringReader("go to a the school"));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to a the school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
 
     filter = new SuggestStopFilter(stream, stopWords);
@@ -103,7 +107,8 @@ public class TestSuggestStopFilter exten
   public void testMultipleStopWordsEnd() throws Exception {
                               
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to", "the", "a");
-    TokenStream stream = new MockTokenizer(new StringReader("go to a the"));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to a the"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
 
     filter = new SuggestStopFilter(stream, stopWords);
@@ -122,7 +127,8 @@ public class TestSuggestStopFilter exten
   public void testMultipleStopWordsEnd2() throws Exception {
                               
     CharArraySet stopWords = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "to", "the", "a");
-    TokenStream stream = new MockTokenizer(new StringReader("go to a the "));
+    Tokenizer stream = new MockTokenizer();
+    stream.setReader(new StringReader("go to a the "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
 
     filter = new SuggestStopFilter(stream, stopWords);

Modified: lucene/dev/branches/lucene5376/lucene/test-framework/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/build.xml?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/build.xml (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/build.xml Fri Jan 17 17:23:33 2014
@@ -27,8 +27,7 @@
   <path id="classpath">
     <pathelement location="${common.dir}/build/core/classes/java"/>
     <pathelement location="${common.dir}/build/codecs/classes/java"/>
-    <path refid="junit-path"/>
-    <path refid="ant-path"/>
+    <fileset dir="lib"/>
   </path>
 
   <path id="test.classpath"/>

Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java Fri Jan 17 17:23:33 2014
@@ -972,4 +972,29 @@ public abstract class BaseTokenStreamTes
     }
     return ret;
   }
+
+  protected static MockTokenizer whitespaceMockTokenizer(Reader input) throws IOException {
+    MockTokenizer mockTokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+    mockTokenizer.setReader(input);
+    return mockTokenizer;
+  }
+
+  protected static MockTokenizer whitespaceMockTokenizer(String input) throws IOException {
+    MockTokenizer mockTokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+    mockTokenizer.setReader(new StringReader(input));
+    return mockTokenizer;
+  }
+
+  protected static MockTokenizer keywordMockTokenizer(Reader input) throws IOException {
+    MockTokenizer mockTokenizer = new MockTokenizer(MockTokenizer.KEYWORD, false);
+    mockTokenizer.setReader(input);
+    return mockTokenizer;
+  }
+
+  protected static MockTokenizer keywordMockTokenizer(String input) throws IOException {
+    MockTokenizer mockTokenizer = new MockTokenizer(MockTokenizer.KEYWORD, false);
+    mockTokenizer.setReader(new StringReader(input));
+    return mockTokenizer;
+  }
+
 }

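The two helper pairs added above give analysis tests a one-line way to build a whitespace or keyword MockTokenizer that already has its reader attached. A minimal usage sketch, assuming the test class extends BaseTokenStreamTestCase (the input text below is illustrative, not taken from this commit):

    public void testWhitespaceHelper() throws Exception {
      // whitespaceMockTokenizer(String) wraps the input in a StringReader and
      // calls setReader() before the stream is consumed.
      MockTokenizer tokens = whitespaceMockTokenizer("go to school");
      assertTokenStreamContents(tokens, new String[] {"go", "to", "school"});
    }
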
Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java Fri Jan 17 17:23:33 2014
@@ -87,8 +87,8 @@ public final class MockAnalyzer extends 
   }
 
   @Override
-  public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-    MockTokenizer tokenizer = new MockTokenizer(reader, runAutomaton, lowerCase, maxTokenLength);
+  public TokenStreamComponents createComponents(String fieldName) {
+    MockTokenizer tokenizer = new MockTokenizer(runAutomaton, lowerCase, maxTokenLength);
     tokenizer.setEnableChecks(enableChecks);
     MockTokenFilter filt = new MockTokenFilter(tokenizer, filter);
     return new TokenStreamComponents(tokenizer, maybePayload(filt, fieldName));

Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java Fri Jan 17 17:23:33 2014
@@ -17,8 +17,6 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
-import java.io.Reader;
-
 /**
  * Analyzer for testing that encodes terms as UTF-16 bytes.
  */
@@ -26,8 +24,8 @@ public class MockBytesAnalyzer extends A
   private final MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
   
   @Override
-  protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-    Tokenizer t = new MockTokenizer(factory, reader, MockTokenizer.KEYWORD, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+  protected TokenStreamComponents createComponents(String fieldName) {
+    Tokenizer t = new MockTokenizer(factory, MockTokenizer.KEYWORD, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
     return new TokenStreamComponents(t);
   }
 }

Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockPayloadAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockPayloadAnalyzer.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockPayloadAnalyzer.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockPayloadAnalyzer.java Fri Jan 17 17:23:33 2014
@@ -34,8 +34,8 @@ import java.io.Reader;
 public final class MockPayloadAnalyzer extends Analyzer {
 
   @Override
-  public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-    Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+  public TokenStreamComponents createComponents(String fieldName) {
+    Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
     return new TokenStreamComponents(result, new MockPayloadFilter(result, fieldName));
   }
 }

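The MockAnalyzer, MockBytesAnalyzer and MockPayloadAnalyzer changes above all follow the same pattern: createComponents now takes only the field name, and the tokenizer is built without a Reader because the Analyzer base class supplies one via setReader() when a token stream is requested. A minimal sketch of an anonymous analyzer written against the new signature (the field handling is illustrative only):

    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        // No Reader here: Analyzer.tokenStream(...) sets it on the tokenizer
        // before the components are used.
        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
        return new TokenStreamComponents(tokenizer);
      }
    };
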
Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/analysis/MockTokenizer.java Fri Jan 17 17:23:33 2014
@@ -89,35 +89,34 @@ public class MockTokenizer extends Token
   // evil: but we don't change the behavior with this random, we only switch up how we read
   private final Random random = new Random(RandomizedContext.current().getRandom().nextLong());
   
-  public MockTokenizer(AttributeFactory factory, Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) {
-    super(factory, input);
+  public MockTokenizer(AttributeFactory factory, CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) {
+    super(factory);
     this.runAutomaton = runAutomaton;
     this.lowerCase = lowerCase;
     this.state = runAutomaton.getInitialState();
-    this.streamState = State.SETREADER;
     this.maxTokenLength = maxTokenLength;
   }
 
-  public MockTokenizer(Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) {
-    this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, input, runAutomaton, lowerCase, maxTokenLength);
+  public MockTokenizer(CharacterRunAutomaton runAutomaton, boolean lowerCase, int maxTokenLength) {
+    this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, runAutomaton, lowerCase, maxTokenLength);
   }
 
-  public MockTokenizer(Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase) {
-    this(input, runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH);
+  public MockTokenizer(CharacterRunAutomaton runAutomaton, boolean lowerCase) {
+    this(runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH);
   }
-  /** Calls {@link #MockTokenizer(Reader, CharacterRunAutomaton, boolean) MockTokenizer(Reader, WHITESPACE, true)} */
-  public MockTokenizer(Reader input) {
-    this(input, WHITESPACE, true);
+  /** Calls {@link #MockTokenizer(CharacterRunAutomaton, boolean) MockTokenizer(WHITESPACE, true)} */
+  public MockTokenizer() {
+    this(WHITESPACE, true);
   }
 
-  public MockTokenizer(AttributeFactory factory, Reader input, CharacterRunAutomaton runAutomaton, boolean lowerCase) {
-    this(factory, input, runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH);
+  public MockTokenizer(AttributeFactory factory, CharacterRunAutomaton runAutomaton, boolean lowerCase) {
+    this(factory, runAutomaton, lowerCase, DEFAULT_MAX_TOKEN_LENGTH);
   }
 
-  /** Calls {@link #MockTokenizer(org.apache.lucene.util.AttributeSource.AttributeFactory,Reader,CharacterRunAutomaton,boolean)
+  /** Calls {@link #MockTokenizer(org.apache.lucene.util.AttributeSource.AttributeFactory,CharacterRunAutomaton,boolean)
    *                MockTokenizer(AttributeFactory, Reader, WHITESPACE, true)} */
-  public MockTokenizer(AttributeFactory factory, Reader input) {
-    this(input, WHITESPACE, true);
+  public MockTokenizer(AttributeFactory factory) {
+    this(factory, WHITESPACE, true);
   }
 
   @Override

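With the Reader argument gone from every MockTokenizer constructor above, code that used to pass a Reader at construction time now calls setReader() explicitly, as the suggester tests earlier in this commit do. A short sketch of equivalent constructor calls after the change (input text is illustrative):

    // These three are equivalent: the no-arg constructor defaults to the
    // WHITESPACE automaton, lowercasing, and DEFAULT_MAX_TOKEN_LENGTH.
    MockTokenizer t1 = new MockTokenizer();
    MockTokenizer t2 = new MockTokenizer(MockTokenizer.WHITESPACE, true);
    MockTokenizer t3 = new MockTokenizer(MockTokenizer.WHITESPACE, true,
                                         MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
    t1.setReader(new StringReader("some text"));
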
Modified: lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene5376/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java Fri Jan 17 17:23:33 2014
@@ -632,7 +632,7 @@ public class MockDirectoryWrapper extend
       randomIOExceptionRateOnOpen = 0.0;
       if (DirectoryReader.indexExists(this)) {
         if (LuceneTestCase.VERBOSE) {
-          System.out.println("\nNOTE: MockDirectoryWrapper: now crash");
+          System.out.println("\nNOTE: MockDirectoryWrapper: now crash");
         }
         crash(); // corrupt any unsynced-files
         if (LuceneTestCase.VERBOSE) {

Modified: lucene/dev/branches/lucene5376/lucene/tools/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/tools/build.xml?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/tools/build.xml (original)
+++ lucene/dev/branches/lucene5376/lucene/tools/build.xml Fri Jan 17 17:23:33 2014
@@ -28,7 +28,7 @@
 
   <path id="classpath">
     <!-- TODO: we need this for forbidden-apis to be happy, because it does not support "includeantruntime": -->
-    <path refid="ant-path"/>
+    <fileset dir="lib"/>
   </path>
 
   <path id="test.classpath"/>

Modified: lucene/dev/branches/lucene5376/lucene/tools/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/lucene/tools/ivy.xml?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/lucene/tools/ivy.xml (original)
+++ lucene/dev/branches/lucene5376/lucene/tools/ivy.xml Fri Jan 17 17:23:33 2014
@@ -18,4 +18,8 @@
 -->
 <ivy-module version="2.0">
     <info organisation="org.apache.lucene" module="core-tools"/>
+    <dependencies>
+       <dependency org="org.apache.ant" name="ant" rev="${/org.apache.ant/ant}" transitive="false" />
+       <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
+    </dependencies>
 </ivy-module>

Modified: lucene/dev/branches/lucene5376/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/CHANGES.txt?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/CHANGES.txt (original)
+++ lucene/dev/branches/lucene5376/solr/CHANGES.txt Fri Jan 17 17:23:33 2014
@@ -5,7 +5,7 @@ Introduction
 Apache Solr is an open source enterprise search server based on the Apache Lucene Java
 search library, with XML/HTTP and JSON APIs, hit highlighting, faceted search,
 caching, replication, and a web administration interface. It runs in a Java
-servlet container such as Tomcat.
+servlet container such as Jetty.
 
 See http://lucene.apache.org/solr for more information.
 
@@ -47,11 +47,6 @@ Detailed Change List
 New Features
 ----------------------
 
-* SOLR-1301: Add a Solr contrib that allows for building Solr indexes via 
-  Hadoop's MapReduce. (Matt Revelle, Alexander Kanarsky, Steve Rowe, 
-  Mark Miller, Greg Bowyer, Jason Rutherglen, Kris Jirapinyo, Jason Venner ,
-  Andrzej Bialecki, Patrick Hunt, Wolfgang Hoschek, Roman Shaposhnik, 
-  Eric Wong)
 
 Other Changes
 ----------------------
@@ -136,6 +131,26 @@ New Features
 
 * SOLR-5536: Add ValueSource collapse criteria to CollapsingQParsingPlugin (Joel Bernstein)
 
+* SOLR-5541: Allow QueryElevationComponent to accept elevateIds and excludeIds 
+  as http parameters (Joel Bernstein)
+
+* SOLR-5463: new 'cursorMark' request param for deep paging of sorted result sets
+  (sarowe, hossman)
+
+* SOLR-5529: Add support for queries to use multiple suggesters.
+  (Areek Zillur, Erick Erickson, via Robert Muir)
+
+* SOLR-5631: Add support for Lucene's FreeTextSuggester.
+  (Areek Zillur via Robert Muir)
+
+* SOLR-1301: Add a Solr contrib that allows for building Solr indexes via 
+  Hadoop's MapReduce. (Matt Revelle, Alexander Kanarsky, Steve Rowe, 
+  Mark Miller, Greg Bowyer, Jason Rutherglen, Kris Jirapinyo, Jason Venner,
+  Andrzej Bialecki, Patrick Hunt, Wolfgang Hoschek, Roman Shaposhnik, 
+  Eric Wong)
+
+* SOLR-5476: Overseer Role for nodes (Noble Paul)
+
 Bug Fixes
 ----------------------
 
@@ -182,21 +197,21 @@ Bug Fixes
 * SOLR-5524: Exception when using Query Function inside Scale Function.
   (Trey Grainger, yonik)
 
-* SOLR-5540: HdfsLockFactory should explicitly create the lock parent directory 
-  if necessary. (Mark Miller)
-
-* SOLR-5543: Core swaps resulted in duplicate core entries in solr.xml when 
-  using solr.xml persistence. (Bill Bell, Alan Woodward)
-
 * SOLR-5562: ConcurrentUpdateSolrServer constructor ignores supplied httpclient.
   (Kyle Halliday via Mark Miller)
 
 * SOLR-5567: ZkController getHostAddress duplicates url prefix.
   (Kyle Halliday, Alexey Serba, shalin)
 
-* SOLR-5577: Likely ZooKeeper expiration should not slow down updates a given
-  amount, but instead cut off updates after a given time. 
-  (Mark Miller, Christine Poerschke)
+* SOLR-4992: Solr eats OutOfMemoryError exceptions in many cases.
+  (Mark Miller, Daniel Collins)  
+
+* SOLR-5636: SolrRequestParsers does some xpath lookups on every request, which
+  can cause concurrency issues. (Mark Miller)
+
+* LUCENE-5399, SOLR-5354: Sort wouldn't work correctly with
+  distributed searching for some field types such as legacy numeric
+  types. (Rob Muir, Mike McCandless)
 
 Optimizations
 ----------------------
@@ -261,8 +276,11 @@ Other Changes
 * SOLR-5590: Upgrade HttpClient/HttpComponents to 4.3.x.
   (Karl Wright via Shawn Heisey)
 
-* pull request #11: change the default of hl.phraseLimit to 5000.
-  (Michael Della Bitta via Robert Muir)
+* SOLR-2794: change the default of hl.phraseLimit to 5000.
+  (Michael Della Bitta via Robert Muir, Koji, zarni - pull request #11)
+
+* SOLR-5632: Improve response message for reloading a non-existent core.
+  (Anshum Gupta via Mark Miller)
 
 ==================  4.6.1  ==================
 
@@ -302,6 +320,10 @@ Bug Fixes
   sets the alias name and the collections to alias to the same value.
   (Aaron Schram, Mark Miller)
 
+* SOLR-5577: Likely ZooKeeper expiration should not slow down updates a given
+  amount, but instead cut off updates after a given time. 
+  (Mark Miller, Christine Poerschke, Ramkumar Aiyengar)
+  
 * SOLR-5580: NPE when creating a core with both explicit shard and coreNodeName.
   (YouPeng Yang, Mark Miller)
 
@@ -329,6 +351,9 @@ Bug Fixes
   ZkCmdExecutor#ensureExists to ensure their election paths are properly
   created. (Mark Miller)
 
+* SOLR-5540: HdfsLockFactory should explicitly create the lock parent directory if 
+  necessary. (Mark Miller)
+  
 * SOLR-4709: The core reload after replication if config files have changed
   can fail due to a race condition. (Mark Miller, Hossman))
 
@@ -337,16 +362,33 @@ Bug Fixes
 
 * SOLR-5588: PeerSync doesn't count all connect failures as success.
   (Mark Miller)
-    
+
 * SOLR-5564: hl.maxAlternateFieldLength should apply to original field when
   fallback is attempted (janhoy)
 
+* SOLR-5608: Don't allow a closed SolrCore to publish state to ZooKeeper.
+  (Mark Miller, Shawn Heisey)
+
+* SOLR-5615: Deadlock while trying to recover after a ZK session expiration.
+  (Ramkumar Aiyengar, Mark Miller)
+
+* SOLR-5543: Core swaps resulted in duplicate core entries in solr.xml when
+  using solr.xml persistence. (Bill Bell, Alan Woodward)
+
+* SOLR-5618: Fix false cache hits in queryResultCache when hashCodes are equal 
+  and duplicate filter queries exist in one of the requests (hossman)
+
+* SOLR-4260: ConcurrentUpdateSolrServer#blockUntilFinished can return before
+  all previously added updates have finished. This could cause distributed
+  updates meant for replicas to be lost. (Markus Jelsma, Timothy Potter,
+  Joel Bernstein, Mark Miller)
+
 Optimizations
-----------------------
+----------------------  
 
 * SOLR-5576: Improve concurrency when registering and waiting for all 
   SolrCore's to register a DOWN state. (Christine Poerschke via Mark Miller)
-  
+
 ==================  4.6.0 ==================
 
 Versions of Major Components
@@ -421,7 +463,7 @@ New Features
 
 * SOLR-5464: Add option to ConcurrentSolrServer to stream pure delete 
    requests. (Mark Miller)
-
+  
 Bug Fixes
 ----------------------
   
@@ -465,7 +507,7 @@ Bug Fixes
 * SOLR-5465: SolrCmdDistributor retry logic has a concurrency race bug. 
   (Mark Miller)
   
-* SOLR-5452: Do not attempt to proxy internal update requests. (Mark Miller)  
+* SOLR-5452: Do not attempt to proxy internal update requests. (Mark Miller)
 
 Optimizations
 ----------------------  
@@ -702,6 +744,9 @@ Bug Fixes
 * SOLR-4489: SpellCheckComponent can throw StringIndexOutOfBoundsException
   when generating collations involving multiple word-break corrections.
   (James Dyer)
+  
+* SOLR-5087: CoreAdminHandler.handleMergeAction generating NullPointerException.
+  (Patrick Hunt via Erick Erickson)
 
 * SOLR-5107: Fixed NPE when using numTerms=0 in LukeRequestHandler
   (Ahmet Arslan, hossman)
@@ -1087,6 +1132,9 @@ Bug Fixes
 * SOLR-5037: The CSV loader now accepts field names that are not in the schema.
   (gsingers, ehatcher, Steve Rowe)
 
+* SOLR-4791: solr.xml sharedLib does not work in 4.3.0 (Ryan Ernst, Jan Høydahl via 
+  Erick Erickson)
+
 Optimizations
 ----------------------
 
@@ -1187,9 +1235,6 @@ Bug Fixes
 * SOLR-4797: Shard splitting creates sub shards which have the wrong hash
   range in cluster state. This happens when numShards is not a power of two
   and router is compositeId. (shalin)
-  
-* SOLR-4791: solr.xml sharedLib does not work in 4.3.0 (Ryan Ernst, Jan Høydahl via 
-  Erick Erickson)
 
 * SOLR-4806: Shard splitting does not abort if WaitForState times out (shalin)
 

Modified: lucene/dev/branches/lucene5376/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java?rev=1559196&r1=1559195&r2=1559196&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java (original)
+++ lucene/dev/branches/lucene5376/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java Fri Jan 17 17:23:33 2014
@@ -224,7 +224,7 @@ public class LuceneCarrot2StemmerFactory
     public static IStemmer createStemmer() {
       try {
         return new LuceneStemmerAdapter();
-      } catch (Throwable e) {
+      } catch (Exception e) {
         return IdentityStemmer.INSTANCE;
       }
     }