Posted to commits@lucene.apache.org by ar...@apache.org on 2015/10/29 00:14:18 UTC

svn commit: r1711159 - in /lucene/dev/branches/branch_5x/lucene: ./ suggest/src/java/org/apache/lucene/search/suggest/document/ suggest/src/test/org/apache/lucene/search/suggest/document/

Author: areek
Date: Wed Oct 28 23:14:18 2015
New Revision: 1711159

URL: http://svn.apache.org/viewvc?rev=1711159&view=rev
Log:
LUCENE-6858: Fix ContextSuggestField to correctly wrap token stream when using CompletionAnalyzer
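
For context, a minimal indexing sketch of the scenario this change addresses
(not part of the commit; the analyzer choice and field values are illustrative
only, mirroring the new tests below). When a ContextSuggestField is analyzed
with a CompletionAnalyzer, the analyzer already hands back a
CompletionTokenStream, and the field must unwrap it before prepending its
contexts:

    // Hedged sketch, not committed code; values are illustrative.
    Analyzer analyzer = new StandardAnalyzer();
    CompletionAnalyzer completionAnalyzer = new CompletionAnalyzer(analyzer);
    ContextSuggestField field = new ContextSuggestField("suggest_field", "suggestion", 1, "context1");
    // Before this fix, the PrefixTokenFilter wrapped the analyzer's
    // CompletionTokenStream itself rather than its underlying token stream,
    // so the resulting stream was not wrapped correctly.
    TokenStream stream = field.tokenStream(completionAnalyzer, null);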

Added:
    lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/CompletionTokenStreamTest.java   (with props)
Modified:
    lucene/dev/branches/branch_5x/lucene/CHANGES.txt
    lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionTokenStream.java
    lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextSuggestField.java
    lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java
    lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestContextSuggestField.java
    lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java

Modified: lucene/dev/branches/branch_5x/lucene/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/CHANGES.txt?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/CHANGES.txt (original)
+++ lucene/dev/branches/branch_5x/lucene/CHANGES.txt Wed Oct 28 23:14:18 2015
@@ -156,6 +156,9 @@ Bug Fixes
 * LUCENE-6856: The Weight wrapper used by LRUQueryCache now delegates to the
   original Weight's BulkScorer when applicable. (Adrien Grand)
 
+* LUCENE-6858: Fix ContextSuggestField to correctly wrap token stream
+  when using CompletionAnalyzer. (Areek Zillur)
+
 Other
 
 * LUCENE-6857: Validate StandardQueryParser with NOT operator 

Modified: lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionTokenStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionTokenStream.java?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionTokenStream.java (original)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionTokenStream.java Wed Oct 28 23:14:18 2015
@@ -57,7 +57,7 @@ public final class CompletionTokenStream
   private final PayloadAttribute payloadAttr = addAttribute(PayloadAttribute.class);
   private final BytesRefBuilderTermAttribute bytesAtt = addAttribute(BytesRefBuilderTermAttribute.class);
 
-  private final TokenStream input;
+  final TokenStream inputTokenStream;
   final boolean preserveSep;
   final boolean preservePositionIncrements;
   final int maxGraphExpansions;
@@ -73,14 +73,14 @@ public final class CompletionTokenStream
    * The token stream <code>input</code> is converted to an automaton
    * with the default settings of {@link org.apache.lucene.search.suggest.document.CompletionAnalyzer}
    */
-  CompletionTokenStream(TokenStream input) {
-    this(input, DEFAULT_PRESERVE_SEP, DEFAULT_PRESERVE_POSITION_INCREMENTS, DEFAULT_MAX_GRAPH_EXPANSIONS);
+  CompletionTokenStream(TokenStream inputTokenStream) {
+    this(inputTokenStream, DEFAULT_PRESERVE_SEP, DEFAULT_PRESERVE_POSITION_INCREMENTS, DEFAULT_MAX_GRAPH_EXPANSIONS);
   }
 
-  CompletionTokenStream(TokenStream input, boolean preserveSep, boolean preservePositionIncrements, int maxGraphExpansions) {
+  CompletionTokenStream(TokenStream inputTokenStream, boolean preserveSep, boolean preservePositionIncrements, int maxGraphExpansions) {
     // Don't call the super(input) ctor - this is a true delegate and has a new attribute source since we consume
-    // the input stream entirely in toFiniteStrings(input)
-    this.input = input;
+    // the input stream entirely in the first call to incrementToken
+    this.inputTokenStream = inputTokenStream;
     this.preserveSep = preserveSep;
     this.preservePositionIncrements = preservePositionIncrements;
     this.maxGraphExpansions = maxGraphExpansions;
@@ -122,14 +122,14 @@ public final class CompletionTokenStream
   public void end() throws IOException {
     super.end();
     if (finiteStrings == null) {
-      input.end();
+      inputTokenStream.end();
     }
   }
 
   @Override
   public void close() throws IOException {
     if (finiteStrings == null) {
-      input.close();
+      inputTokenStream.close();
     }
   }
 
@@ -173,9 +173,9 @@ public final class CompletionTokenStream
       tsta.setPreservePositionIncrements(preservePositionIncrements);
       tsta.setUnicodeArcs(unicodeAware);
 
-      automaton = tsta.toAutomaton(input);
+      automaton = tsta.toAutomaton(inputTokenStream);
     } finally {
-      IOUtils.closeWhileHandlingException(input);
+      IOUtils.closeWhileHandlingException(inputTokenStream);
     }
 
     // TODO: we can optimize this somewhat by determinizing
@@ -271,7 +271,7 @@ public final class CompletionTokenStream
   /**
    * Attribute providing access to the term builder and UTF-16 conversion
    */
-  private interface BytesRefBuilderTermAttribute extends TermToBytesRefAttribute {
+  public interface BytesRefBuilderTermAttribute extends TermToBytesRefAttribute {
     /**
      * Returns the builder from which the term is derived.
      */

Modified: lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextSuggestField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextSuggestField.java?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextSuggestField.java (original)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextSuggestField.java Wed Oct 28 23:14:18 2015
@@ -85,19 +85,20 @@ public class ContextSuggestField extends
 
   @Override
   protected CompletionTokenStream wrapTokenStream(TokenStream stream) {
-    for (CharSequence context : contexts()) {
+    final Iterable<CharSequence> contexts = contexts();
+    for (CharSequence context : contexts) {
       validate(context);
     }
-    PrefixTokenFilter prefixTokenFilter = new PrefixTokenFilter(stream, (char) CONTEXT_SEPARATOR, contexts());
     CompletionTokenStream completionTokenStream;
     if (stream instanceof CompletionTokenStream) {
       completionTokenStream = (CompletionTokenStream) stream;
+      PrefixTokenFilter prefixTokenFilter = new PrefixTokenFilter(completionTokenStream.inputTokenStream, (char) CONTEXT_SEPARATOR, contexts);
       completionTokenStream = new CompletionTokenStream(prefixTokenFilter,
           completionTokenStream.preserveSep,
           completionTokenStream.preservePositionIncrements,
           completionTokenStream.maxGraphExpansions);
     } else {
-      completionTokenStream = new CompletionTokenStream(prefixTokenFilter);
+      completionTokenStream = new CompletionTokenStream(new PrefixTokenFilter(stream, (char) CONTEXT_SEPARATOR, contexts));
     }
     return completionTokenStream;
   }
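
Summarizing the corrected wrapping order as a sketch (paraphrased from the hunk
above, not the verbatim committed code): when the incoming stream is already a
CompletionTokenStream, the contexts are prepended to its inner token stream and
the original completion settings are carried over.

    if (stream instanceof CompletionTokenStream) {
      CompletionTokenStream cts = (CompletionTokenStream) stream;
      // Prefix the *inner* stream, then re-wrap with the same settings.
      TokenStream prefixed = new PrefixTokenFilter(cts.inputTokenStream, (char) CONTEXT_SEPARATOR, contexts);
      return new CompletionTokenStream(prefixed, cts.preserveSep, cts.preservePositionIncrements, cts.maxGraphExpansions);
    } else {
      // Plain analyzers: prefix the stream directly and use the default settings.
      return new CompletionTokenStream(new PrefixTokenFilter(stream, (char) CONTEXT_SEPARATOR, contexts));
    }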

Modified: lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java (original)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java Wed Oct 28 23:14:18 2015
@@ -132,11 +132,20 @@ public final class NRTSuggester implemen
       return;
     }
     final List<FSTUtil.Path<Pair<Long, BytesRef>>> prefixPaths = FSTUtil.intersectPrefixPaths(scorer.automaton, fst);
-    final int queueSize = getMaxTopNSearcherQueueSize(collector.getCountToCollect() * prefixPaths.size(),
-        scorer.reader.numDocs(), liveDocsRatio, scorer.filtered);
+    // The topN is increased by a factor of the number of intersected paths
+    // to ensure search admissibility. For example, one suggestion can
+    // have multiple contexts, resulting in num_context paths for the
+    // suggestion in the FST instead of 1. When queried for the suggestion,
+    // the topN value ensures that all paths to the suggestion are evaluated
+    // (in the case of a match-all context query).
+    // Note that collectors terminate early as soon as enough suggestions
+    // have been collected, regardless of the topN value set here. This value is
+    // the maximum number of suggestions that can be collected.
+    final int topN = collector.getCountToCollect() * prefixPaths.size();
+    final int queueSize = getMaxTopNSearcherQueueSize(topN, scorer.reader.numDocs(), liveDocsRatio, scorer.filtered);
     Comparator<Pair<Long, BytesRef>> comparator = getComparator();
-    Util.TopNSearcher<Pair<Long, BytesRef>> searcher = new Util.TopNSearcher<Pair<Long, BytesRef>>(fst,
-        collector.getCountToCollect(), queueSize, comparator, new ScoringPathComparator(scorer)) {
+    Util.TopNSearcher<Pair<Long, BytesRef>> searcher = new Util.TopNSearcher<Pair<Long, BytesRef>>(fst, topN, queueSize, comparator,
+        new ScoringPathComparator(scorer)) {
 
       private final CharsRefBuilder spare = new CharsRefBuilder();
 

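A small worked example of the topN scaling above (the numbers are illustrative
only): if the collector should gather 5 suggestions and the query automaton
intersects 3 prefix paths in the FST, the searcher's topN becomes 5 * 3 = 15,
so a suggestion reachable through several context-prefixed paths can still be
collected; getMaxTopNSearcherQueueSize() then derives its queue size from this
larger topN, and the collector still stops as soon as 5 suggestions have been
collected.
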
Added: lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/CompletionTokenStreamTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/CompletionTokenStreamTest.java?rev=1711159&view=auto
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/CompletionTokenStreamTest.java (added)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/CompletionTokenStreamTest.java Wed Oct 28 23:14:18 2015
@@ -0,0 +1,178 @@
+package org.apache.lucene.search.suggest.document;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.StringReader;
+
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.synonym.SynonymFilter;
+import org.apache.lucene.analysis.synonym.SynonymMap;
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRef;
+import org.apache.lucene.util.CharsRefBuilder;
+import org.junit.Test;
+
+public class CompletionTokenStreamTest extends BaseTokenStreamTestCase {
+
+  @Test
+  public void testBasic() throws Exception {
+    Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    String input = "mykeyword";
+    BytesRef payload = new BytesRef("payload");
+    tokenStream.setReader(new StringReader(input));
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(tokenStream);
+    completionTokenStream.setPayload(payload);
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    assertTokenStreamContents(stream, new String[] {input}, null, null, new String[] {payload.utf8ToString()}, new int[] { 1 }, null, null);
+  }
+
+  @Test
+  public void testWithNoPreserveSep() throws Exception {
+    Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    String input = "mykeyword another keyword";
+    BytesRef payload = new BytesRef("payload");
+    tokenStream.setReader(new StringReader(input));
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(tokenStream, false, false, 100);
+    completionTokenStream.setPayload(payload);
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    assertTokenStreamContents(stream, new String[] {"mykeywordanotherkeyword"}, null, null, new String[] {payload.utf8ToString()}, new int[] { 1 }, null, null);
+  }
+
+  @Test
+  public void testWithMultipleTokens() throws Exception {
+    Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    String input = "mykeyword another keyword";
+    tokenStream.setReader(new StringReader(input));
+    BytesRef payload = new BytesRef("payload");
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(tokenStream);
+    completionTokenStream.setPayload(payload);
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    CharsRefBuilder builder = new CharsRefBuilder();
+    builder.append("mykeyword");
+    builder.append(((char) CompletionAnalyzer.SEP_LABEL));
+    builder.append("another");
+    builder.append(((char) CompletionAnalyzer.SEP_LABEL));
+    builder.append("keyword");
+    assertTokenStreamContents(stream, new String[]{builder.toCharsRef().toString()}, null, null, new String[]{payload.utf8ToString()}, new int[]{1}, null, null);
+  }
+
+  @Test
+  public void testWithSynonym() throws Exception {
+    SynonymMap.Builder builder = new SynonymMap.Builder(true);
+    builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
+    Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    tokenizer.setReader(new StringReader("mykeyword"));
+    SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(filter);
+    BytesRef payload = new BytesRef("payload");
+    completionTokenStream.setPayload(payload);
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    assertTokenStreamContents(stream, new String[] {"mykeyword", "mysynonym"}, null, null, new String[] {payload.utf8ToString(), payload.utf8ToString()}, new int[] { 1, 1 }, null, null);
+  }
+
+  @Test
+  public void testWithSynonyms() throws Exception {
+    SynonymMap.Builder builder = new SynonymMap.Builder(true);
+    builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
+    Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    String input = "mykeyword another keyword";
+    tokenStream.setReader(new StringReader(input));
+    SynonymFilter filter = new SynonymFilter(tokenStream, builder.build(), true);
+    BytesRef payload = new BytesRef("payload");
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(filter, true, false, 100);
+    completionTokenStream.setPayload(payload);
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    String[] expectedOutputs = new String[2];
+    CharsRefBuilder expectedOutput = new CharsRefBuilder();
+    expectedOutput.append("mykeyword");
+    expectedOutput.append(((char) CompletionAnalyzer.SEP_LABEL));
+    expectedOutput.append("another");
+    expectedOutput.append(((char) CompletionAnalyzer.SEP_LABEL));
+    expectedOutput.append("keyword");
+    expectedOutputs[0] = expectedOutput.toCharsRef().toString();
+    expectedOutput.clear();
+    expectedOutput.append("mysynonym");
+    expectedOutput.append(((char) CompletionAnalyzer.SEP_LABEL));
+    expectedOutput.append("another");
+    expectedOutput.append(((char) CompletionAnalyzer.SEP_LABEL));
+    expectedOutput.append("keyword");
+    expectedOutputs[1] = expectedOutput.toCharsRef().toString();
+    assertTokenStreamContents(stream, expectedOutputs, null, null, new String[]{payload.utf8ToString(), payload.utf8ToString()}, new int[]{1, 1}, null, null);
+  }
+
+  @Test
+  public void testValidNumberOfExpansions() throws IOException {
+    SynonymMap.Builder builder = new SynonymMap.Builder(true);
+    for (int i = 0; i < 256; i++) {
+      builder.add(new CharsRef("" + (i+1)), new CharsRef("" + (1000 + (i+1))), true);
+    }
+    StringBuilder valueBuilder = new StringBuilder();
+    for (int i = 0 ; i < 8 ; i++) {
+      valueBuilder.append(i+1);
+      valueBuilder.append(" ");
+    }
+    MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+    tokenizer.setReader(new StringReader(valueBuilder.toString()));
+    SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
+
+    CompletionTokenStream completionTokenStream = new CompletionTokenStream(filter);
+    completionTokenStream.setPayload(new BytesRef());
+    PayloadAttrToTypeAttrFilter stream = new PayloadAttrToTypeAttrFilter(completionTokenStream);
+    stream.reset();
+    CompletionTokenStream.BytesRefBuilderTermAttribute attr = stream.addAttribute(CompletionTokenStream.BytesRefBuilderTermAttribute.class);
+    PositionIncrementAttribute posAttr = stream.addAttribute(PositionIncrementAttribute.class);
+    int maxPos = 0;
+    int count = 0;
+    while(stream.incrementToken()) {
+      count++;
+      assertNotNull(attr.getBytesRef());
+      assertTrue(attr.getBytesRef().length > 0);
+      maxPos += posAttr.getPositionIncrement();
+    }
+    stream.close();
+    assertEquals(count, 256);
+    assertEquals(count, maxPos);
+  }
+
+  public final static class PayloadAttrToTypeAttrFilter extends TokenFilter {
+    private PayloadAttribute payload = addAttribute(PayloadAttribute.class);
+    private TypeAttribute type = addAttribute(TypeAttribute.class);
+
+    protected PayloadAttrToTypeAttrFilter(TokenStream input) {
+      super(input);
+    }
+
+    @Override
+    public boolean incrementToken() throws IOException {
+      if (input.incrementToken()) {
+        // we move them over so we can assert them more easily in the tests
+        type.setType(payload.getPayload().utf8ToString());
+        return true;
+      }
+      return false;
+    }
+  }
+}

Modified: lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestContextSuggestField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestContextSuggestField.java?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestContextSuggestField.java (original)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestContextSuggestField.java Wed Oct 28 23:14:18 2015
@@ -17,19 +17,26 @@ package org.apache.lucene.search.suggest
  * limitations under the License.
  */
 
+import java.io.ByteArrayOutputStream;
+
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.OutputStreamDataOutput;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static org.apache.lucene.search.suggest.document.TestSuggestField.Entry;
 import static org.apache.lucene.search.suggest.document.TestSuggestField.assertSuggestions;
 import static org.apache.lucene.search.suggest.document.TestSuggestField.iwcWithSuggestField;
@@ -87,6 +94,40 @@ public class TestContextSuggestField ext
   }
 
   @Test
+  public void testTokenStream() throws Exception {
+    Analyzer analyzer = new MockAnalyzer(random());
+    ContextSuggestField field = new ContextSuggestField("field", "input", 1, "context1", "context2");
+    BytesRef surfaceForm = new BytesRef("input");
+    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    try (OutputStreamDataOutput output = new OutputStreamDataOutput(byteArrayOutputStream)) {
+      output.writeVInt(surfaceForm.length);
+      output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length);
+      output.writeVInt(1 + 1);
+      output.writeByte(ContextSuggestField.TYPE);
+    }
+    BytesRef payload = new BytesRef(byteArrayOutputStream.toByteArray());
+    String[] expectedOutputs = new String[2];
+    CharsRefBuilder builder = new CharsRefBuilder();
+    builder.append("context1");
+    builder.append(((char) ContextSuggestField.CONTEXT_SEPARATOR));
+    builder.append(((char) CompletionAnalyzer.SEP_LABEL));
+    builder.append("input");
+    expectedOutputs[0] = builder.toCharsRef().toString();
+    builder.clear();
+    builder.append("context2");
+    builder.append(((char) ContextSuggestField.CONTEXT_SEPARATOR));
+    builder.append(((char) CompletionAnalyzer.SEP_LABEL));
+    builder.append("input");
+    expectedOutputs[1] = builder.toCharsRef().toString();
+    TokenStream stream = new CompletionTokenStreamTest.PayloadAttrToTypeAttrFilter(field.tokenStream(analyzer, null));
+    assertTokenStreamContents(stream, expectedOutputs, null, null, new String[]{payload.utf8ToString(), payload.utf8ToString()}, new int[]{1, 1}, null, null);
+
+    CompletionAnalyzer completionAnalyzer = new CompletionAnalyzer(analyzer);
+    stream = new CompletionTokenStreamTest.PayloadAttrToTypeAttrFilter(field.tokenStream(completionAnalyzer, null));
+    assertTokenStreamContents(stream, expectedOutputs, null, null, new String[]{payload.utf8ToString(), payload.utf8ToString()}, new int[]{1, 1}, null, null);
+  }
+
+  @Test
   public void testMixedSuggestFields() throws Exception {
     Analyzer analyzer = new MockAnalyzer(random());
     Document document = new Document();
@@ -148,4 +189,39 @@ public class TestContextSuggestField ext
     reader.close();
     iw.close();
   }
+
+  @Test
+  public void testCompletionAnalyzer() throws Exception {
+    CompletionAnalyzer completionAnalyzer = new CompletionAnalyzer(new StandardAnalyzer(), true, true);
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(completionAnalyzer, "suggest_field"));
+    Document document = new Document();
+
+    document.add(new ContextSuggestField("suggest_field", "suggestion1", 4, "type1"));
+    document.add(new ContextSuggestField("suggest_field", "suggestion2", 3, "type2"));
+    document.add(new ContextSuggestField("suggest_field", "suggestion3", 2, "type3"));
+    iw.addDocument(document);
+    document = new Document();
+    document.add(new ContextSuggestField("suggest_field", "suggestion4", 1, "type4"));
+    iw.addDocument(document);
+
+    if (rarely()) {
+      iw.commit();
+    }
+
+    DirectoryReader reader = iw.getReader();
+    SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
+    ContextQuery query = new ContextQuery(new PrefixCompletionQuery(completionAnalyzer, new Term("suggest_field", "sugg")));
+    TopSuggestDocs suggest = suggestIndexSearcher.suggest(query, 4);
+    assertSuggestions(suggest,
+        new Entry("suggestion1", "type1", 4),
+        new Entry("suggestion2", "type2", 3),
+        new Entry("suggestion3", "type3", 2),
+        new Entry("suggestion4", "type4", 1));
+    query.addContext("type1");
+    suggest = suggestIndexSearcher.suggest(query, 4);
+    assertSuggestions(suggest,
+        new Entry("suggestion1", "type1", 4));
+    reader.close();
+    iw.close();
+  }
 }

Modified: lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java?rev=1711159&r1=1711158&r2=1711159&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java (original)
+++ lucene/dev/branches/branch_5x/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java Wed Oct 28 23:14:18 2015
@@ -17,6 +17,7 @@ package org.apache.lucene.search.suggest
  * limitations under the License.
  */
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -29,6 +30,7 @@ import java.util.concurrent.CyclicBarrie
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.lucene54.Lucene54Codec;
@@ -46,7 +48,9 @@ import org.apache.lucene.search.ScoreDoc
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.suggest.BitsProducer;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.OutputStreamDataOutput;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
@@ -55,6 +59,7 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
 import static org.apache.lucene.search.suggest.document.TopSuggestDocs.SuggestScoreDoc;
 import static org.hamcrest.core.IsEqual.equalTo;
 
@@ -135,6 +140,27 @@ public class TestSuggestField extends Lu
   }
 
   @Test
+  public void testTokenStream() throws Exception {
+    Analyzer analyzer = new MockAnalyzer(random());
+    SuggestField suggestField = new SuggestField("field", "input", 1);
+    BytesRef surfaceForm = new BytesRef("input");
+    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    try (OutputStreamDataOutput output = new OutputStreamDataOutput(byteArrayOutputStream)) {
+      output.writeVInt(surfaceForm.length);
+      output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length);
+      output.writeVInt(1 + 1);
+      output.writeByte(SuggestField.TYPE);
+    }
+    BytesRef payload = new BytesRef(byteArrayOutputStream.toByteArray());
+    TokenStream stream = new CompletionTokenStreamTest.PayloadAttrToTypeAttrFilter(suggestField.tokenStream(analyzer, null));
+    assertTokenStreamContents(stream, new String[] {"input"}, null, null, new String[]{payload.utf8ToString()}, new int[]{1}, null, null);
+
+    CompletionAnalyzer completionAnalyzer = new CompletionAnalyzer(analyzer);
+    stream = new CompletionTokenStreamTest.PayloadAttrToTypeAttrFilter(suggestField.tokenStream(completionAnalyzer, null));
+    assertTokenStreamContents(stream, new String[] {"input"}, null, null, new String[]{payload.utf8ToString()}, new int[]{1}, null, null);
+  }
+
+  @Test
   public void testDupSuggestFieldValues() throws Exception {
     Analyzer analyzer = new MockAnalyzer(random());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));