Posted to commits@lucene.apache.org by mi...@apache.org on 2014/03/16 19:11:10 UTC

svn commit: r1578133 [9/11] - in /lucene/dev/branches/lucene5376_2/lucene: ./ analysis/common/src/java/org/apache/lucene/analysis/charfilter/ analysis/common/src/java/org/apache/lucene/analysis/pattern/ analysis/common/src/java/org/apache/lucene/analys...

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestAnalysis.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestAnalysis.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestAnalysis.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestAnalysis.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,231 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.util.Locale;
+
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestAnalysis extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  public void testCustomAnalysisChain() throws Exception {
+    send("analyze", "{text: 'Here is some text', analyzer: {tokenizer: Standard, tokenFilters: [LowerCase]}}");
+    assertEquals("here is some text", justTokens());
+
+    send("analyze", "{text: 'Here is some text', analyzer: {tokenizer: Standard}}");
+    assertEquals("Here is some text", justTokens());
+
+    send("analyze", "{text: 'Here is some text', analyzer: {tokenizer: {class: Standard, maxTokenLength: 2}, tokenFilters: [LowerCase]}}");
+    assertEquals("is", justTokens());
+  }
+
+  public void testPatternTokenizer() throws Exception {
+    send("analyze", "{text: 'Here is \\'some\\' text', analyzer: {tokenizer: {class: Pattern, pattern: \"\\'([^\\']+)\\'\", group: 1}}}");
+    assertEquals("some", justTokens());
+  }
+
+  public void testExtraArgs() throws Exception {
+    assertFailsWith("analyze", "{text: 'Here is \\'some\\' text', analyzer: {tokenizer: {class: Pattern, pattern: \"\\'([^\\']+)\\'\", group: 1, bad: 14}}}", "analyze > analyzer > tokenizer: failed to create TokenizerFactory for class \"Pattern\": java.lang.IllegalArgumentException: Unknown parameters: {bad=14}");
+  }
+
+  public void testKeywordMarkerFilter() throws Exception {
+    // No KWMarkerFilter, so "dogs" is stemmed:
+    send("analyze", "{text: 'Here is some dogs', analyzer: {tokenizer: Standard, tokenFilters: [EnglishPossessive, LowerCase, Stop, EnglishMinimalStem]}}");
+    assertEquals("here some dog", justTokens());
+
+    // Use KWMarkerFilter with protectedFileContents to protect dogs:
+    send("analyze", "{text: 'Here is some dogs', analyzer: {tokenizer: Standard, tokenFilters: [EnglishPossessive, LowerCase, Stop, {class: KeywordMarker, protectedFileContents:[dogs]}, EnglishMinimalStem]}}");
+    assertEquals("here some dogs", justTokens());
+
+    // Use KWMarkerFilter with pattern to protect dogs:
+    send("analyze", "{text: 'Here is some dogs', analyzer: {tokenizer: Standard, tokenFilters: [EnglishPossessive, LowerCase, Stop, {class: KeywordMarker, pattern: dogs}, EnglishMinimalStem]}}");
+    assertEquals("here some dogs", justTokens());
+  }
+
+  public void testEnglishAnalyzer() throws Exception {
+    send("analyze", "{text: 'dogs go running', analyzer: {class: EnglishAnalyzer}}");
+    assertEquals("dog go run", justTokens());
+
+    // This time protecting dogs from stemming:
+    send("analyze", "{text: 'dogs go running', analyzer: {class: EnglishAnalyzer, stemExclusionSet: [dogs]}}");
+    assertEquals("dogs go run", justTokens());
+  }
+
+  public void testStopFilter() throws Exception {
+    // Uses the default (English) stop words:
+    send("analyze", "{text: 'the dogs go running', analyzer: {tokenizer: Whitespace, tokenFilters: [Stop]}}");
+    assertEquals("dogs go running", justTokens());
+
+    // This time making only "running" a stop word:
+    send("analyze", "{text: 'the dogs go running', analyzer: {tokenizer: Whitespace, tokenFilters: [{class: Stop, wordsFileContents: [running]}]}}");
+    assertEquals("the dogs go", justTokens());
+  }
+
+  public void testBadCharFilterSpec() throws Exception {
+    assertFailsWith("analyze",
+                    "{text: abc, analyzer: {charFilters: [17], tokenizer: Whitespace}}",
+                    "analyze > analyzer > charFilters[0]: expected one of StringType, StructType, but got Integer");
+  }
+
+  public void testNonExistentCharFilter() throws Exception {
+    assertFailsWith("analyze",
+                    "{text: abc, analyzer: {charFilters: [Bad], tokenizer: Whitespace}}",
+                    "analyze > analyzer > charFilters[0]",
+                    "A SPI class of type org.apache.lucene.analysis.util.CharFilterFactory with name 'Bad' does not exist");
+  }
+
+  public void testPatternReplaceCharFilter() throws Exception {
+    send("analyze",
+         "{text: foo bar, analyzer: {charFilters: [{class: PatternReplace, pattern: foo, replacement: bar}], tokenizer: Whitespace}}");
+    assertEquals("bar bar", justTokens());
+  }
+
+  /** Exercises the xxxFileContents hack, for a char filter */
+  public void testMappingCharFilter() throws Exception {
+    send("analyze",
+         "{text: foo bar, analyzer: {charFilters: [{class: Mapping, mappingFileContents: '\"bar\" => \"foo\"'}], tokenizer: Whitespace}}");
+    assertEquals("foo foo", justTokens());
+  }
+
+  public void testPositionIncrementGap() throws Exception {
+    curIndexName = "posinc";
+    TestUtil.rmDir(new File("posinc"));
+    send("createIndex", "{rootDir: posinc}");
+    send("settings", "{directory: RAMDirectory}");
+    send("registerFields", "{fields: {author1: {type: text, analyzer: {tokenizer: Whitespace}, multiValued: true}, author2: {type: text, analyzer: {tokenizer: Whitespace, positionIncrementGap: 1}, multiValued: true}}}");
+    send("startIndex");
+    send("addDocument", "{fields: {author1: [bob, smith], author2: [bob, smith]}}");
+    long gen = getLong("indexGen");
+
+    // This one matches because the two values act like they
+    // were just concatenated:
+    send("search", String.format(Locale.ROOT, "{queryText: 'author1: \"bob smith\"', searcher: {indexGen: %d}}", gen));
+    assertEquals(1, getInt("hits.length"));
+
+    // This one doesn't match because a hole is inserted
+    // between the two values:
+    send("search", String.format(Locale.ROOT, "{queryText: 'author2: \"bob smith\"', searcher: {indexGen: %d}}", gen));
+    assertEquals(0, getInt("hits.length"));
+    send("stopIndex");
+    send("deleteIndex");
+  }
+
+  // nocommit test loading syns from "file" too
+  public void testSynonymFilter() throws Exception {
+    send("analyze", "{text: 'domain name service is complex', analyzer: {tokenizer: Whitespace, tokenFilters: [LowerCase, {class: Synonym, ignoreCase: true, analyzer: WhitespaceAnalyzer, synonyms: [{input: 'domain name service', output: 'dns'}]}]}}");
+    assertEquals("dns/0 is/1 complex/2", tokensAndPositions());
+
+    send("analyze", "{text: 'domain name service is complex', analyzer: {tokenizer: Whitespace, tokenFilters: [LowerCase, {class: Synonym, ignoreCase: true, analyzer: WhitespaceAnalyzer, synonyms: [{input: 'domain name service', output: 'dns', replace: false}]}]}}");
+    assertEquals("domain/0 dns/0:3 name/1 service/2 is/3 complex/4", tokensAndPositions());
+
+    send("analyze", "{text: 'mother knows best', analyzer: {tokenizer: Whitespace, tokenFilters: [LowerCase, {class: Synonym, ignoreCase: true, analyzer: WhitespaceAnalyzer, synonyms: [{input: ['mother', 'mommy'], output: 'mom'}]}]}}");
+    assertEquals("mom/0 knows/1 best/2", tokensAndPositions());
+  }
+
+  String ONLY_WHITESPACE_RULES = "\\n!!forward;\\n" + 
+    "\\n" +
+    "$Whitespace = [\\\\p{Whitespace}];\\n" +
+    "$NonWhitespace = [\\\\P{Whitespace}];\\n" +
+    "$Letter = [\\\\p{Letter}];\\n" +
+    "$Number = [\\\\p{Number}];\\n" +
+    "# Default rule status is {0}=RBBI.WORD_NONE => not tokenized by ICUTokenizer\\n" +
+    "$Whitespace;\\n" +
+    "# Assign rule status {200}=RBBI.WORD_LETTER when the token contains a letter char\\n" +
+    "# Mapped to <ALPHANUM> token type by DefaultICUTokenizerConfig\\n" +
+    "$NonWhitespace* $Letter $NonWhitespace*   {200};\\n" +
+    "# Assign rule status {100}=RBBI.WORD_NUM when the token contains a numeric char\\n" +
+    "# Mapped to <NUM> token type by DefaultICUTokenizerConfig\\n" +
+    "$NonWhitespace* $Number $NonWhitespace*   {100};\\n" +
+    "# Assign rule status {1} (no RBBI equivalent) when the token contains neither a letter nor a numeric char\\n" +
+    "# Mapped to <OTHER> token type by DefaultICUTokenizerConfig\\n" +
+    "$NonWhitespace+   {1};";
+
+  public void testICUTokenizer() throws Exception {
+    send("analyze", "{text: 'domain-name service is complex', analyzer: {tokenizer: {class: ICU, rules: [{script: Latn, rules: \"" + ONLY_WHITESPACE_RULES + "\"}]}}}");
+    assertEquals("domain-name/0 service/1 is/2 complex/3", tokensAndPositions());
+  }
+
+  public void testSpanishLightStem() throws Exception {
+    send("analyze", "{text: 'las lomitas', analyzer: {tokenizer: Standard, tokenFilters: [SpanishLightStem]}}");
+    assertEquals("las/0 lomit/1", tokensAndPositions());
+  }
+
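+  /** Concatenates the token text from the last analyze response, separated by single spaces. */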
+  private String justTokens() {
+    StringBuilder sb = new StringBuilder();
+    for(Object _o : (JSONArray) lastResult.get("tokens")) {
+      JSONObject token = (JSONObject) _o;
+      if (sb.length() > 0) {
+        sb.append(' ');
+      }
+      sb.append(token.get("token"));
+    }
+    return sb.toString();
+  }
+
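+  /** Renders each token from the last analyze response as token/position, appending :positionLength when it is not 1. */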
+  private String tokensAndPositions() {
+    StringBuilder sb = new StringBuilder();
+    for(Object _o : (JSONArray) lastResult.get("tokens")) {
+      JSONObject token = (JSONObject) _o;
+      if (sb.length() > 0) {
+        sb.append(' ');
+      }
+      sb.append(token.get("token"));
+      sb.append('/');
+      sb.append(token.get("position"));
+      int posLen = ((Integer) token.get("positionLength")).intValue();
+      if (posLen != 1) {
+        sb.append(':');
+        sb.append(posLen);
+      }
+    }
+
+    return sb.toString();
+  }
+
+  public void testCharFilter() throws Exception {
+    send("analyze", "{text: '<pre>here is some text</pre>', analyzer: {tokenizer: Whitespace}}");
+    assertEquals("<pre>here is some text</pre>", justTokens());
+
+    // With HTMLStripCharFilter the <pre> and </pre> are removed:
+    send("analyze", "{text: '<pre>here is some text</pre>', analyzer: {charFilters: [HTMLStrip], tokenizer: Whitespace}}");
+    assertEquals("here is some text", justTokens());
+  }
+
+  // nocommit test loading my custom analyzer using fully
+  // qualified class name...
+
+  // nocommit need testOffsetGap ... how...
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBlockJoinQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBlockJoinQuery.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBlockJoinQuery.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBlockJoinQuery.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,126 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestBlockJoinQuery extends ServerBaseTestCase {
+  
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    send("registerFields", "{fields: {docType: {type: atom}, name: {type: atom, store: true}, country: {type: atom, store: true, sort: true}, skill: {type: atom, store: true}, year: {type: int, store: true}}}");
+  }
+
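+  /** Builds the JSON document for a parent (resume) doc with docType, name and country fields. */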
+  private JSONObject getResume(String name, String country) {
+    JSONObject o = new JSONObject();
+    o.put("docType", "resume");
+    o.put("name", name);
+    o.put("country", country);
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    return o2;
+  }
+
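+  /** Builds the JSON document for a child (job) doc with skill and year fields. */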
+  private JSONObject getJob(String skill, int year) {
+    JSONObject o = new JSONObject();
+    o.put("skill", skill);
+    o.put("year", year);
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    return o2;
+  }
+
+  public void testToParentBlockJoin() throws Exception {
+    deleteAllDocs();
+
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("parent", getResume("Lisa", "United Kingdom"));
+    JSONArray arr = new JSONArray();
+    o.put("children", arr);
+    arr.add(getJob("java", 2007));
+    arr.add(getJob("python", 2010));
+    send("addDocuments", o);
+
+    // search on parent:
+    send("search", "{query: {class: ToParentBlockJoinQuery, childQuery: {class: text, field: skill, text: python}, parentsFilter: {class: CachingWrapperFilter, filter: {class: QueryWrapperFilter, query: {class: TermQuery, field: docType, term: resume}}}}}");
+    //System.out.println("GOT: " + result);
+    assertEquals(1, getInt("totalHits"));
+
+    // Returns child docs grouped up to parent doc:
+    send("search", "{retrieveFields: [skill, year, name, country], query: {class: ToParentBlockJoinQuery, childHits: {}, childQuery: {class: text, field: skill, text: python}, parentsFilter: {class: CachingWrapperFilter, filter: {class: QueryWrapperFilter, query: {class: TermQuery, field: docType, term: resume}}}}}");
+
+    assertEquals(1, getInt("totalGroupCount"));
+    // Grouping from a BJQ does not set totalHits:
+    assertEquals(0, getInt("totalHits"));
+
+    assertEquals(1, getInt("groups.length"));
+    assertEquals(1, getInt("groups[0].hits.length"));
+    assertEquals("Lisa", getString("groups[0].fields.name"));
+    assertEquals("United Kingdom", getString("groups[0].fields.country"));
+    assertEquals("python", getString("groups[0].hits[0].fields.skill"));
+    assertEquals(2010, getInt("groups[0].hits[0].fields.year"));
+
+    // Sort by country
+    send("search", "{retrieveFields: [skill, year, name, country], query: {class: ToParentBlockJoinQuery, childHits: {sort: [{field: country}]}, childQuery: {class: text, field: skill, text: python}, parentsFilter: {class: CachingWrapperFilter, filter: {class: QueryWrapperFilter, query: {class: TermQuery, field: docType, term: resume}}}}}");
+  }
+
+  public void testToParentBlockJoinWithExpressions() throws Exception {
+    deleteAllDocs();
+
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("parent", getResume("Lisa", "United Kingdom"));
+    JSONArray arr = new JSONArray();
+    o.put("children", arr);
+    arr.add(getJob("java", 2007));
+    arr.add(getJob("python", 2010));
+    send("addDocuments", o);
+
+    // search on parent:
+    send("search",
+         "{virtualFields: [{name: origScore, expression: '_score'}]," +
+          "sort: {fields: [{field: origScore}]}," +
+          "retrieveFields: [origScore]," +
+          "query: {class: ToParentBlockJoinQuery, childHits: {}," +
+                  "scoreMode: Max," +
+                  "childQuery: {class: text, field: skill, text: python}," +
+                  "parentsFilter: {class: CachingWrapperFilter, filter: {class: QueryWrapperFilter, query: {class: TermQuery, field: docType, term: resume}}}}}");
+    //System.out.println("GOT: " + prettyPrint(lastResult));
+    assertEquals(1, getInt("totalGroupedHits"));
+    double childScore = getFloat("groups[0].hits[0].fields.origScore");
+    assertEquals(childScore, getFloat("groups[0].fields.origScore"), 0.0);
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBooleanFieldType.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBooleanFieldType.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBooleanFieldType.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestBooleanFieldType.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,70 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONObject;
+
+public class TestBooleanFieldType extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "id", "{type: int, store: true, postingsFormat: Memory}");
+    put(o, "flagStored", "{type: boolean, store: true, search: false}");
+    put(o, "flagIndexed", "{type: boolean, store: false, search: true}");
+    JSONObject o2 = new JSONObject();
+    o2.put("indexName", "index");
+    o2.put("fields", o);
+    send("registerFields", o2);
+  }
+
+  public void testStored() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, flagStored: false}}");
+    long gen = getLong(send("addDocument", "{fields: {id: 1, flagStored: true}}"), "indexGen");
+    JSONObject o = send("search", "{searcher: {indexGen: " + gen + "}, query: MatchAllDocsQuery, retrieveFields: [id, flagStored]}");
+    assertEquals(2, getInt(o, "totalHits"));
+    assertFalse(getBoolean(o, "hits[0].fields.flagStored"));
+    assertTrue(getBoolean(o, "hits[1].fields.flagStored"));
+  }
+
+  public void testIndexed() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {id: 0, flagIndexed: false, flagStored: false}}");
+    long gen = getLong(send("addDocument", "{fields: {id: 1, flagIndexed: true, flagStored: true}}"), "indexGen");
+    JSONObject o = send("search", "{searcher: {indexGen: " + gen + "}, query: MatchAllDocsQuery, filter: {class: BooleanFieldFilter, field: flagIndexed}, retrieveFields: [id, flagStored]}");
+    assertEquals(1, getInt(o, "totalHits"));
+    assertTrue(getBoolean(o, "hits[0].fields.flagStored"));
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCommitUserData.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCommitUserData.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCommitUserData.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCommitUserData.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,51 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+public class TestCommitUserData extends ServerBaseTestCase {
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  /** Make sure we can set commit data even when there are
+   *  no docs. */
+  public void testEmpty() throws Exception {
+    send("setCommitUserData", "{userData: {a: c, b: d}}");
+    send("getCommitUserData");
+    assertEquals("c", getString("a"));
+    assertEquals("d", getString("b"));
+    shutdownServer();
+    startServer();
+    send("startIndex");
+    send("getCommitUserData");
+    assertEquals("c", getString("a"));
+    assertEquals("d", getString("b"));
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCustomDirectory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCustomDirectory.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCustomDirectory.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestCustomDirectory.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,72 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Set;
+
+import org.apache.lucene.store.MMapDirectory;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONObject;
+
+public class TestCustomDirectory extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = false;
+    startServer();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static boolean iWasUsed;
+
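+  /** MMapDirectory subclass that records (via iWasUsed) that the server instantiated it. */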
+  public static class MyDirectory extends MMapDirectory {
+    public MyDirectory(File path) throws IOException {
+      super(path);
+      iWasUsed = true;
+    }
+  }
+
+  public void testCustomDirectory() throws Exception {
+    curIndexName = "index";
+    TestUtil.rmDir(new File("index"));
+    send("createIndex", "{rootDir: " + curIndexName + "}");
+    send("settings", "{directory: org.apache.lucene.server.TestCustomDirectory$MyDirectory, matchVersion: LUCENE_40}");
+    send("startIndex");
+    send("stopIndex");
+    send("deleteIndex");
+    assertTrue(iWasUsed);
+  }
+
+  public void testInvalidDirectory() throws Exception {
+    curIndexName = "index";
+    TestUtil.rmDir(new File("index"));
+    send("createIndex", "{rootDir: " + curIndexName + "}");
+    assertFailsWith("settings", "{directory: bad}", "could not locate Directory sub-class \"bad\"; verify CLASSPATH");
+    send("deleteIndex");
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestFacets.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestFacets.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestFacets.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,350 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestFacets extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  static String indexFacetField;
+
+  // nocommit need test showing how to change the DVF for
+  // the "underlying" facet index field ($facets by default)
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "body", "{type: text, highlight: true, store: true, analyzer: {class: StandardAnalyzer, matchVersion: LUCENE_43}, similarity: {class: BM25Similarity, b: 0.15}}");
+    put(o, "price", "{type: float, sort: true, search: true, store: true}");
+    put(o, "longField", "{type: long, search: true, facet: numericRange}");
+    put(o, "doubleField", "{type: double, search: true, facet: numericRange}");
+    put(o, "floatField", "{type: float, search: true, facet: numericRange}");
+    put(o, "id", "{type: int, store: true, postingsFormat: Memory}");
+    put(o, "date", "{type: atom, search: false, store: true}");
+    if (random().nextBoolean()) {
+      // Send facets to two different random fields:
+      String name = "x" + TestUtil.randomSimpleString(random(), 1, 10);
+      put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy, facetIndexFieldName: " + name + "}");
+      if (VERBOSE) {
+        System.out.println("NOTE: send dateFacet to facetIndexFieldName=" + name);
+      }
+      name = "y" + TestUtil.randomSimpleString(random(), 1, 10);
+      put(o, "author", "{type: text, search: false, facet: flat, group: true, facetIndexFieldName: " + name + "}");
+      if (VERBOSE) {
+        System.out.println("NOTE: send author to facetIndexFieldName=" + name);
+      }
+
+    } else if (random().nextBoolean()) {
+      // Send facets to the same random field:
+      indexFacetField = "x" + TestUtil.randomSimpleString(random(), 1, 10);
+      put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy, facetIndexFieldName: " + indexFacetField + "}");
+      put(o, "author", "{type: text, search: false, facet: flat, group: true, facetIndexFieldName: " + indexFacetField + "}");
+      if (VERBOSE) {
+        System.out.println("NOTE: send dateFacet to facetIndexFieldName=" + indexFacetField);
+        System.out.println("NOTE: send author to facetIndexFieldName=" + indexFacetField);
+      }
+    } else {
+      // Use default $facets field:
+      put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy}");
+      put(o, "author", "{type: text, search: false, facet: flat, group: true}");
+    }
+    JSONObject o2 = new JSONObject();
+    o2.put("indexName", "index");
+    o2.put("fields", o);
+    send("registerFields", o2);
+  }
+
+  // Returns gen for the added document
+  private long addDocument(int id, String author, String body, float price, String date) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("body", body);
+    o.put("author", author);
+    o.put("price", price);
+    o.put("id", id);
+    o.put("date", date);
+    JSONArray path = new JSONArray();
+    o.put("dateFacet", path);
+    for(String part : date.split("/")) {
+      path.add(part);
+    }
+
+    JSONObject o2 = new JSONObject();
+    o2.put("indexName", "index");
+    o2.put("fields", o);
+    JSONObject result = send("addDocument", o2);
+    return getLong(result, "indexGen");
+  }
+
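+  /** Builds and sends a search request with optional searcher generation, sort and grouping, always requesting dateFacet counts and retrieving id, date, price and the highlighted body. */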
+  private JSONObject search(String query, long indexGen, String sortField, boolean reversed, boolean snippets, String groupField, String groupSortField) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("queryText", query);
+    if (indexGen != -1) {
+      JSONObject o2 = new JSONObject();
+      o.put("searcher", o2);
+      o2.put("indexGen", indexGen);
+    }
+
+    if (sortField != null) {
+      JSONObject sort = new JSONObject();
+      o.put("sort", sort);
+      sort.put("doDocScores", true);
+
+      JSONArray sortFields = new JSONArray();
+      sort.put("fields", sortFields);
+
+      JSONObject o2 = new JSONObject();
+      sortFields.add(o2);
+
+      o2.put("field", sortField);
+      o2.put("reverse", reversed);
+    }
+
+    if (groupField != null) {
+      String s = "{field: '" + groupField + "'";
+      if (groupSortField != null) {
+        s += ", sort: [{field: '" + groupSortField + "'}]";
+      }
+      s += "}";
+      put(o, "grouping", s);
+    }
+
+    put(o, "facets", "[{dim: dateFacet, topN: 10}]");
+    put(o, "retrieveFields", "[id, date, price, {field: body, highlight: " + (snippets ? "snippets" : "whole") + "}]");
+
+    return send("search", o);
+  }
+
+  public void testFacets() throws Exception {
+    deleteAllDocs();
+    addDocument(0, "Bob", "this is a test", 10.99f, "2012/10/17");
+    addDocument(1, "Lisa", "this is a another test", 11.99f, "2012/10/1");
+    addDocument(2, "Frank", "this is a third test", 12.99f, "2010/10/1");
+    search("test", -1, "price", false, true, null, null);
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(3, getInt("hits.length"));
+
+    assertEquals(0, getInt("hits[0].fields.id"));
+    assertEquals("2012/10/17", getString("hits[0].fields.date"));
+
+    assertEquals(1, getInt("hits[1].fields.id"));
+    assertEquals("2012/10/1", getString("hits[1].fields.date"));
+
+    assertEquals(2, getInt("hits[2].fields.id"));
+    assertEquals("2010/10/1", getString("hits[2].fields.date"));
+
+    assertEquals("top: 3, 2012: 2, 2010: 1", formatFacetCounts(getObject("facets[0]")));
+  }    
+
+  public void testFacetsReopen() throws Exception {
+    deleteAllDocs();
+    addDocument(0, "Bob", "this is a test", 10.99f, "2012/10/17");
+    addDocument(1, "Lisa", "this is a another test", 11.99f, "2012/10/1");
+    commit();
+
+    addDocument(2, "Frank", "this is a third test", 12.99f, "2010/10/1");
+    search("test", -1, "price", false, true, null, null);
+    assertEquals(3, getInt("totalHits"));
+    assertEquals(3, getInt("hits.length"));
+
+    assertEquals(0, getInt("hits[0].fields.id"));
+    assertEquals("2012/10/17", getString("hits[0].fields.date"));
+
+    assertEquals(1, getInt("hits[1].fields.id"));
+    assertEquals("2012/10/1", getString("hits[1].fields.date"));
+
+    assertEquals(2, getInt("hits[2].fields.id"));
+    assertEquals("2010/10/1", getString("hits[2].fields.date"));
+
+    assertEquals("top: 3, 2012: 2, 2010: 1", formatFacetCounts(getObject("facets[0]")));
+  }    
+
+  public void testDrillSideways() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {author: Bob}}");
+    send("addDocument", "{fields: {author: Lisa}}");
+    send("addDocument", "{fields: {author: Lisa}}");
+    send("addDocument", "{fields: {author: Tom}}");
+    send("addDocument", "{fields: {author: Tom}}");
+    send("addDocument", "{fields: {author: Tom}}");
+
+    // Initial query:
+    send("search", "{query: MatchAllDocsQuery, facets: [{dim: author, topN: 10}]}");
+    assertEquals(6, getInt("totalHits"));
+    assertEquals("top: 6, Tom: 3, Lisa: 2, Bob: 1", formatFacetCounts(getObject("facets[0]")));
+
+    // Now, single drill down:
+    send("search", "{drillDowns: [{field: author, value: Bob}], query: MatchAllDocsQuery, facets: [{dim: author, topN: 10}]}");
+    assertEquals(1, getInt("totalHits"));
+    assertEquals("top: 6, Tom: 3, Lisa: 2, Bob: 1", formatFacetCounts(getObject("facets[0]")));
+
+    // Multi (OR'd) drill down:
+    send("search", "{drillDowns: [{field: author, value: Bob}, {field: author, value: Lisa}], query: MatchAllDocsQuery, facets: [{dim: author, topN: 10}]}");
+    assertEquals(3, getInt("totalHits"));
+    assertEquals("top: 6, Tom: 3, Lisa: 2, Bob: 1", formatFacetCounts(getObject("facets[0]")));
+  }
+
+  public void testCustomLabels() throws Exception {
+    deleteAllDocs();
+    send("addDocument", "{fields: {author: Bob}}");
+    send("addDocument", "{fields: {author: Lisa}}");
+    send("addDocument", "{fields: {author: Lisa}}");
+    send("addDocument", "{fields: {author: Tom}}");
+    send("addDocument", "{fields: {author: Tom}}");
+    send("addDocument", "{fields: {author: Tom}}");
+
+    send("search", "{query: MatchAllDocsQuery, facets: [{dim: author, labels: [Bob, Lisa, Tom]}]}");
+    assertEquals("top: -1, Bob: 1, Lisa: 2, Tom: 3", formatFacetCounts(getObject("facets[0]")));
+  }
+
+  public void testLongRangeFacets() throws Exception {
+    deleteAllDocs();    
+    for(int i=0;i<100;i++) {
+      send("addDocument", "{fields: {longField: " + i + "}}");
+    }
+    send("search", "{facets: [{dim: longField, numericRanges: [{label: All, min: 0, max: 99, minInclusive: true, maxInclusive: true}, {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}]}]}");
+    assertEquals("top: 100, All: 100, Half: 50", formatFacetCounts(getObject("facets[0]")));
+
+    send("search", "{drillDowns: [{field: longField, numericRange: {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}}], facets: [{dim: longField, numericRanges: [{label: All, min: 0, max: 99, minInclusive: true, maxInclusive: true}, {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}]}]}");
+    assertEquals("top: 100, All: 100, Half: 50", formatFacetCounts(getObject("facets[0]")));
+    assertEquals(50, getInt("totalHits"));
+  }
+
+  public void testDoubleRangeFacets() throws Exception {
+    deleteAllDocs();    
+    for(int i=0;i<100;i++) {
+      send("addDocument", "{fields: {doubleField: " + i + "}}");
+    }
+    send("search", "{facets: [{dim: doubleField, numericRanges: [{label: All, min: 0, max: 99, minInclusive: true, maxInclusive: true}, {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}]}]}");
+    assertEquals("top: 100, All: 100, Half: 50", formatFacetCounts(getObject("facets[0]")));
+
+    send("search", "{drillDowns: [{field: doubleField, numericRange: {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}}], facets: [{dim: doubleField, numericRanges: [{label: All, min: 0, max: 99, minInclusive: true, maxInclusive: true}, {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}]}]}");
+    assertEquals("top: 100, All: 100, Half: 50", formatFacetCounts(getObject("facets[0]")));
+    assertEquals(50, getInt("totalHits"));
+  }
+
+  // nocommit fails ... we need to add FloatRangeFacetCounts
+  // to lucene???
+
+  /*
+  public void testFloatRangeFacets() throws Exception {
+    deleteAllDocs();    
+    long gen = -1;
+    for(int i=0;i<100;i++) {
+      gen = getLong(send("addDocument", "{fields: {floatField: " + i + "}}"), "indexGen");
+    }
+    JSONObject o = send("search", "{facets: [{dim: floatField, numericRanges: [{label: All, min: 0, max: 99, minInclusive: true, maxInclusive: true}, {label: Half, min: 0, max: 49, minInclusive: true, maxInclusive: true}]}], searcher: {indexGen: " + gen + "}}");
+    System.out.println("got" + get(o, "facets[0]"));
+    assertEquals("All", getString(o, "facets[0].counts[1][0]"));
+    assertEquals(100, getInt(o, "facets[0].counts[1][1]"));
+    assertEquals("Half", getString(o, "facets[0].counts[2][0]"));
+    assertEquals(50, getInt(o, "facets[0].counts[2][1]"));
+  }
+  */
+
+  public void testSortedSetDocValuesFacets() throws Exception {
+    curIndexName = "ssdvFacets";
+    TestUtil.rmDir(new File(curIndexName));
+    send("createIndex", "{rootDir: " + curIndexName + "}");
+    send("settings", "{directory: FSDirectory, matchVersion: LUCENE_46}");
+    send("startIndex");
+
+    if (indexFacetField != null && random().nextBoolean()) {
+      // Send SSDV facets to same field as the taxo facets:
+      send("registerFields", "{fields: {ssdv: {type: atom, search: false, store: false, facet: sortedSetDocValues, facetIndexFieldName: " + indexFacetField + "}}}");
+    } else if (random().nextBoolean()) {
+      // Send SSDV facets to a random index field:
+      String name = TestUtil.randomSimpleString(random(), 1, 10);
+      send("registerFields", "{fields: {ssdv: {type: atom, search: false, store: false, facet: sortedSetDocValues, facetIndexFieldName: " + name + "}}}");
+    } else {
+      // Send SSDV facets to default field:
+      send("registerFields", "{fields: {ssdv: {type: atom, search: false, store: false, facet: sortedSetDocValues}}}");
+    }
+
+    // Verify error message:
+    try {
+      send("search", "{query: MatchAllDocsQuery, facets: [{dim: ssdv}]}");
+      fail("did not hit expected exception");
+    } catch (IOException ioe) {
+      // nocommit we could/should make this NOT be an error?
+      // you should just get back empty facets?
+      assertTrue(ioe.getMessage().contains("search > facets: field \"ssdv\" was properly registered with facet=\"sortedSetDocValues\", however no documents were indexed as of this searcher"));
+    }
+
+    send("addDocument", "{fields: {ssdv: one}}");
+    send("addDocument", "{fields: {ssdv: two}}");
+    send("commit");
+    send("addDocument", "{fields: {ssdv: two}}");
+    send("addDocument", "{fields: {ssdv: three}}");
+    send("commit");
+    send("addDocument", "{fields: {ssdv: one}}");
+    send("addDocument", "{fields: {ssdv: one}}");
+
+    for(int i=0;i<2;i++) {
+      // nocommit if i remove indexGen from here, the error
+      // message is bad: it says "each element in the array
+      // my have these params:..." when it shouldn't
+      send("search", "{query: MatchAllDocsQuery, facets: [{dim: ssdv}]}");
+      assertEquals(6, getInt("totalHits"));
+      assertEquals("top: 6, one: 3, two: 2, three: 1", formatFacetCounts(getObject("facets[0]")));
+
+      // Make sure facets survive server restart:
+      shutdownServer();
+      startServer();
+      send("startIndex");
+    }
+  }
+
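+  /** Renders the [label, count] pairs from a facet result as "label: count, ..." for compact assertions. */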
+  public static String formatFacetCounts(JSONObject facets) {
+    StringBuilder sb = new StringBuilder();
+    JSONArray arr = getArray(facets, "counts");
+    for(Object o : arr) {
+      JSONArray facet = (JSONArray) o;
+      sb.append(facet.get(0));
+      sb.append(": ");
+      sb.append(facet.get(1));
+      sb.append(", ");
+    }
+    String s = sb.toString();
+    // remove last ', ':
+    return s.substring(0, s.length()-2);
+  }
+}
+

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestGrouping.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestGrouping.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestGrouping.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestGrouping.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,257 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestGrouping extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "body", "{type: text, highlight: true, store: true, analyzer: {class: StandardAnalyzer, matchVersion: LUCENE_43}, similarity: {class: BM25Similarity, b: 0.15}}");
+    put(o, "price", "{type: float, sort: true, search: true, store: true}");
+    put(o, "id", "{type: int, store: true, postingsFormat: Memory}");
+    put(o, "date", "{type: atom, search: false, store: true}");
+    put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy}");
+    put(o, "author", "{type: text, search: false, facet: flat, group: true}");
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    o2.put("indexName", "index");
+    send("registerFields", o2);
+  }
+
+  // Returns gen for the added document
+  private long addDocument(int id, String author, String body, float price, String date) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("body", body);
+    o.put("author", author);
+    o.put("price", price);
+    o.put("id", id);
+    o.put("date", date);
+    JSONArray path = new JSONArray();
+    o.put("dateFacet", path);
+    for(String part : date.split("/")) {
+      path.add(part);
+    }
+
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    o2.put("indexName", "index");
+    JSONObject result = send("addDocument", o2);
+    return getLong(result, "indexGen");
+  }
+
+  public void testGrouping() throws Exception {
+    deleteAllDocs();
+    addDocument(0, "Lisa", "this is a test.  here is a random sentence.  here is another sentence with test in it.", 10.99f, "2012/10/17");
+    addDocument(0, "Tom", "this is a test.  here is another sentence with test in it.", 10.99f, "2012/10/17");
+    addDocument(0, "Lisa", "this is a test.  this sentence has test twice test.", 10.99f, "2012/10/17");
+    long gen = addDocument(0, "Bob", "this is a test.", 10.99f, "2012/10/17");
+
+    JSONObject o2 = search("test", gen, null, false, false, "author", null);
+    assertEquals(4, ((Number) o2.get("totalHits")).intValue());
+    assertEquals(4, ((Number) o2.get("totalGroupedHits")).intValue());
+    JSONArray a = (JSONArray) o2.get("groups");
+    assertEquals(3, a.size());
+
+    assertEquals("Lisa", ((JSONObject) a.get(0)).get("groupValue"));
+    assertEquals(2, ((Number)((JSONObject) a.get(0)).get("totalHits")).intValue());
+
+    assertEquals("Tom", ((JSONObject) a.get(1)).get("groupValue"));
+    assertEquals(1, ((Number)((JSONObject) a.get(1)).get("totalHits")).intValue());
+
+    assertEquals("Bob", ((JSONObject) a.get(2)).get("groupValue"));
+    assertEquals(1, ((Number)((JSONObject) a.get(2)).get("totalHits")).intValue());
+
+    // Should be this:
+    /*
+{
+    "facets": [
+        {
+            "2012": 4
+        }
+    ],
+    "groups": [
+        {
+            "groupSortFields": {
+                "<score>": 0.7768564
+            },
+            "groupValue": "Bob",
+            "hits": [
+                {
+                    "doc": 3,
+                    "fields": {
+                        "body": "this is a <b>test</b>.",
+                        "date": "2012/10/17",
+                        "id": "0",
+                        "price": "10.99"
+                    },
+                    "score": 0.7768564
+                }
+            ],
+            "maxScore": 0.7768564,
+            "totalHits": 1
+        },
+        {
+            "groupSortFields": {
+                "<score>": 0.50458306
+            },
+            "groupValue": "Lisa",
+            "hits": [
+                {
+                    "doc": 2,
+                    "fields": {
+                        "body": "this is a <b>test</b>.  this sentence has <b>test</b> twice <b>test</b>.",
+                        "date": "2012/10/17",
+                        "id": "0",
+                        "price": "10.99"
+                    },
+                    "score": 0.50458306
+                },
+                {
+                    "doc": 0,
+                    "fields": {
+                        "body": "this is a <b>test</b>.  here is a random sentence.  here is another sentence with <b>test</b> in it.",
+                        "date": "2012/10/17",
+                        "id": "0",
+                        "price": "10.99"
+                    },
+                    "score": 0.3433253
+                }
+            ],
+            "maxScore": 0.50458306,
+            "totalHits": 2
+        },
+        {
+            "groupSortFields": {
+                "<score>": 0.4806554
+            },
+            "groupValue": "Tom",
+            "hits": [
+                {
+                    "doc": 1,
+                    "fields": {
+                        "body": "this is a <b>test</b>.  here is another sentence with <b>test</b> in it.",
+                        "date": "2012/10/17",
+                        "id": "0",
+                        "price": "10.99"
+                    },
+                    "score": 0.4806554
+                }
+            ],
+            "maxScore": 0.4806554,
+            "totalHits": 1
+        }
+    ],
+    "maxScore": 0.7768564,
+    "searchState": {
+        "lastDocID": 1,
+        "searcher": 25
+    },
+    "totalGroupedHits": 4,
+    "totalHits": 4
+}
+    */
+  }
+
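+  /** Builds and sends a search request with optional searcher generation, sort and grouping, always requesting dateFacet counts and retrieving id, date, price and the highlighted body. */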
+  private JSONObject search(String query, long indexGen, String sortField, boolean reversed, boolean snippets, String groupField, String groupSortField) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("queryText", query);
+    if (indexGen != -1) {
+      JSONObject o2 = new JSONObject();
+      o.put("searcher", o2);
+      o2.put("indexGen", indexGen);
+    }
+
+    if (sortField != null) {
+      JSONObject sort = new JSONObject();
+      o.put("sort", sort);
+      if (groupField == null) {
+        sort.put("doDocScores", true);
+      }
+
+      JSONArray sortFields = new JSONArray();
+      sort.put("fields", sortFields);
+
+      JSONObject o2 = new JSONObject();
+      sortFields.add(o2);
+
+      o2.put("field", sortField);
+      o2.put("reverse", reversed);
+    }
+
+    if (groupField != null) {
+      String s = "{field: '" + groupField + "'";
+      if (groupSortField != null) {
+        s += ", sort: [{field: '" + groupSortField + "'}]";
+      }
+      s += "}";
+      put(o, "grouping", s);
+    }
+
+    put(o, "facets", "[{dim: 'dateFacet', topN: 10}]");
+    put(o, "retrieveFields", "['id', 'date', 'price', {field: 'body', highlight: " + (snippets ? "snippets" : "whole") + "}]");
+
+    return send("search", o);
+  }
+
+  public void testGroupingWithGroupSort() throws Exception {
+    deleteAllDocs();
+    addDocument(0, "Lisa", "this is a test.  here is a random sentence.  here is another sentence with test in it.", 5.99f, "2010/10/17");
+    addDocument(0, "Tom", "this is a test.  here is another sentence with test in it.", 11.99f, "2011/10/17");
+    addDocument(0, "Lisa", "this is a test.  this sentence has test twice test.", 1.99f, "2012/10/17");
+    long gen = addDocument(0, "Bob", "this is a test.", 7.99f, "2013/10/17");
+
+    JSONObject o2 = search("test", gen, "price", false, false, "author", "price");
+    assertEquals(4, ((Number) o2.get("totalHits")).intValue());
+    assertEquals(4, ((Number) o2.get("totalGroupedHits")).intValue());
+    JSONArray a = (JSONArray) o2.get("groups");
+    assertEquals(3, a.size());
+
+    assertEquals("Lisa", ((JSONObject) a.get(0)).get("groupValue"));
+    assertEquals(2, ((Number)((JSONObject) a.get(0)).get("totalHits")).intValue());
+    assertNull(((JSONObject) a.get(0)).get("maxScore"));
+
+    assertEquals("Bob", ((JSONObject) a.get(1)).get("groupValue"));
+    assertEquals(1, ((Number)((JSONObject) a.get(1)).get("totalHits")).intValue());
+    assertNull(((JSONObject) a.get(1)).get("maxScore"));
+
+    assertEquals("Tom", ((JSONObject) a.get(2)).get("groupValue"));
+    assertEquals(1, ((Number)((JSONObject) a.get(2)).get("totalHits")).intValue());
+    assertNull(((JSONObject) a.get(2)).get("maxScore"));
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestHighlight.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestHighlight.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestHighlight.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestHighlight.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,240 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestHighlight extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "body", "{type: text, highlight: true, store: true, analyzer: {class: StandardAnalyzer, matchVersion: LUCENE_43}, similarity: {class: BM25Similarity, b: 0.15}}");
+    put(o, "price", "{type: float, sort: true, search: true, store: true}");
+    put(o, "id", "{type: int, store: true, postingsFormat: Memory}");
+    put(o, "date", "{type: atom, search: false, store: true}");
+    put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy}");
+    put(o, "author", "{type: text, search: false, facet: flat, group: true}");
+    // Register multi-valued field:
+    put(o, "authors", "{type: text, highlight: true, facet: flat, multiValued: true, analyzer: {matchVersion: LUCENE_43, class: StandardAnalyzer}}");
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    o2.put("indexName", "index");
+
+    send("registerFields", o2);
+  }
+
+  // Returns gen for the added document
+  private long addDocument(int id, String author, String body, float price, String date) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("body", body);
+    o.put("author", author);
+    o.put("price", price);
+    o.put("id", id);
+    o.put("date", date);
+    JSONArray path = new JSONArray();
+    o.put("dateFacet", path);
+    for(String part : date.split("/")) {
+      path.add(part);
+    }
+
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    o2.put("indexName", "index");
+    JSONObject result = send("addDocument", o2);
+    return getLong(result, "indexGen");
+  }
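+  // Illustrative only: for addDocument(0, "Melanie", "some text", 10.99f, "2012/10/17"),
+  // the helper above sends an addDocument request shaped roughly like:
+  //   {"fields": {"body": "some text", "author": "Melanie", "price": 10.99, "id": 0,
+  //               "date": "2012/10/17", "dateFacet": ["2012", "10", "17"]},
+  //    "indexName": "index"}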
+
+  private JSONObject search(String query, long indexGen, String sortField, boolean reversed, boolean snippets, String groupField, String groupSortField) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("queryText", query);
+    if (indexGen != -1) {
+      JSONObject o2 = new JSONObject();
+      o.put("searcher", o2);
+      o2.put("indexGen", indexGen);
+    }
+
+    if (sortField != null) {
+      JSONObject sort = new JSONObject();
+      o.put("sort", sort);
+      sort.put("doDocScores", true);
+
+      JSONArray sortFields = new JSONArray();
+      sort.put("fields", sortFields);
+
+      JSONObject o2 = new JSONObject();
+      sortFields.add(o2);
+
+      o2.put("field", sortField);
+      o2.put("reverse", reversed);
+    }
+
+    if (groupField != null) {
+      String s = "{field: '" + groupField + "'";
+      if (groupSortField != null) {
+        s += ", sort: [{field: '" + groupSortField + "'}]";
+      }
+      s += "}";
+      put(o, "grouping", s);
+    }
+
+    put(o, "facets", "[{dim: 'dateFacet', topN: 10}]");
+    put(o, "retrieveFields", "['id', 'date', 'price', {field: 'body', highlight: " + (snippets ? "snippets" : "whole") + "}]");
+
+    return send("search", o);
+  }
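+  // Illustrative only: with snippets=true and no sort/grouping, the helper above builds
+  // a search request shaped roughly like:
+  //   {"indexName": "index", "queryText": "test", "searcher": {"indexGen": <gen>},
+  //    "facets": [{"dim": "dateFacet", "topN": 10}],
+  //    "retrieveFields": ["id", "date", "price", {"field": "body", "highlight": "snippets"}]}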
+
+  public void testHighlightSnippet() throws Exception {
+    deleteAllDocs();
+    long gen = addDocument(0, "Melanie", "this is a test.  here is a random sentence.  here is another sentence with test in it.", 10.99f, "2012/10/17");
+    JSONObject o = search("test", gen, null, false, true, null, null);
+
+    assertEquals("this is a <b>test</b>.  ...here is another sentence with <b>test</b> in it.",
+                 renderHighlight(getArray(o, "hits[0].fields.body")));
+  }
+
+  // LUCENE-5415
+  public void testHighlightSnippetWildcard() throws Exception {
+    deleteAllDocs();
+    long gen = addDocument(0, "Melanie", "this is a test.  here is a random sentence.  here is another sentence with test in it.", 10.99f, "2012/10/17");
+    search("te*", gen, null, false, true, null, null);
+    JSONArray fragments = getArray("hits[0].fields.body");
+    assertEquals(2, fragments.size());
+    assertEquals("body:te*", getString(fragments, "[0].parts[1].term"));
+    assertEquals("body:te*", getString(fragments, "[1].parts[1].term"));
+  }
+
+  /** Highlight the entire value as a single passage (e.g. good
+   *  for title fields). */
+  public void testWholeHighlight() throws Exception {
+    deleteAllDocs();
+    long gen = addDocument(0, "Lisa", "this is a test.  here is a random sentence.  here is another sentence with test in it.", 10.99f, "2012/10/17");
+    JSONObject o = search("test", gen, null, false, false, null, null);
+    assertEquals("this is a <b>test</b>.  here is a random sentence.  here is another sentence with <b>test</b> in it.",
+                 renderHighlight(getArray(o, "hits[0].fields.body")));
+  }
+
+  /** Make sure we can index a field with 3 values,
+   *  highlight it, and get back 3 values, each of them
+   *  separately highlighted (not a single value with the 3
+   *  values appended). */
+  public void testMultiValuedWholeHighlight() throws Exception {
+    deleteAllDocs();
+
+    long gen = addDocument("{fields: {authors: ['Dr. Seuss', 'Bob Smith', 'Seuss is Fun.  Some extra content.']}}");
+    JSONObject result = send("search", "{queryText: 'authors:seuss', retrieveFields: [{field: authors, highlight: whole}], searcher: {indexGen: " + gen + "}}");
+    assertEquals(1, getInt(result, "totalHits"));
+    JSONArray fields = getArray(result, "hits[0].fields.authors");
+    assertEquals(3, fields.size());
+    assertEquals("Dr. <b>Seuss</b>", renderSingleHighlight((JSONArray) fields.get(0)));
+    assertEquals("Bob Smith", renderSingleHighlight((JSONArray) fields.get(1)));
+    assertEquals("<b>Seuss</b> is Fun.  Some extra content.", renderSingleHighlight((JSONArray) fields.get(2)));
+  }
+
+  public void testMultiValuedSnippetHighlight() throws Exception {
+    deleteAllDocs();
+
+    long gen = addDocument("{fields: {authors: ['Dr. Seuss', 'Bob Smith', 'Seuss is Fun.  Some extra content.']}}");
+    JSONObject result = send("search", "{queryText: 'authors:seuss', retrieveFields: [{field: authors, highlight: snippets, maxPassages: 1}], searcher: {indexGen: " + gen + "}}");
+    assertEquals(1, getInt(result, "totalHits"));
+    assertEquals(1, getInt(result, "hits[0].fields.authors.length"));
+    assertEquals("<b>Seuss</b> Bob Smith <b>Seuss</b> is Fun.  ", renderSingleHighlight(getArray(result, "hits[0].fields.authors[0].parts")));
+  }
+  
+  /** Make sure we can use a different maxPassages per field */
+  public void testPerFieldMaxPassages() throws Exception {
+    deleteAllDocs();
+    long gen = addDocument("{fields: {body: 'This sentence has test.  This one does not.  Here is test again.', authors: ['This sentence has test.  This one does not.  Here is test again.']}}");
+    JSONObject result = send("search", "{queryText: 'test', retrieveFields: [{field: authors, highlight: snippets, maxPassages: 1}, {field: body, highlight: snippets, maxPassages: 2}], searcher: {indexGen: " + gen + "}}");
+    assertEquals(1, getInt(result, "totalHits"));
+
+    // Author has just 1 passage:
+    assertEquals(1, getInt(result, "hits[0].fields.authors.length"));
+    assertEquals("Here is <b>test</b> again.", renderHighlight(getArray(result, "hits[0].fields.authors")));
+
+    // Body has 2 passages:
+    assertEquals(2, getInt(result, "hits[0].fields.body.length"));
+    assertEquals("This sentence has <b>test</b>.  ...Here is <b>test</b> again.", renderHighlight(getArray(result, "hits[0].fields.body")));
+  }
+
+  /** We don't allow INFO_SEP (U+001F) to appear in
+   *  multi-valued highlight fields. */
+  public void testContentWithSep() throws Exception {
+    deleteAllDocs();
+    try {
+      addDocument("{fields: {authors: ['Dr. Seuss', 'Bob \u001F Smith', 'Seuss is Fun']}}");
+      fail("didn't hit exception");
+    } catch (IOException ioe) {
+      // expected
+    }
+  }
+
+  // nocommit fixme
+  /*
+  public void testNonDefaultOffsetGap() throws Exception {
+    // nocommit add test infra to create a randomly named new index?
+    TestUtil.rmDir(new File("offsetgap"));
+    curIndexName = "offsetgap";
+    send("createIndex", "{rootDir: offsetgap}");
+    // Wait at most 1 msec for a searcher to reopen; this
+    // value is too low for a production site but for
+    // testing we want to minimize sleep time:
+    send("liveSettings", "{minRefreshSec: 0.001}");
+    send("startIndex", "{}");
+    JSONObject o = new JSONObject();
+
+    put(o, "body", "{type: text, multiValued: true, highlight: true, store: true, analyzer: {tokenizer: StandardTokenizer, offsetGap: 100}}");
+
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    send("registerFields", o2);
+
+    // Index one document:
+    long indexGen = getLong(send("addDocument", "{fields: {body: ['highlight me', 'highlight me too']}}"), "indexGen");
+
+    // Search w/ highlight:
+    JSONObject result = send("search", "{queryText: highlight, retrieveFields: [{field: 'body', highlight: 'whole'}]}");
+
+    JSONArray parts = getArray(result, "hits[0].fields.body");
+    assertEquals(2, parts.size());
+    assertEquals("<b>highlight</b> me", renderSingleHighlight(getArray(parts, 0)));
+    // nocommit this fails when offsetGap != 1 ... debug!
+    //assertEquals("<b>highlight</b> me too", renderSingleHighlight(getArray(parts, 1)));
+  }
+  */
+}
+

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestIndexing.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestIndexing.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestIndexing.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,396 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestIndexing extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "body", "{type: text, highlight: true, store: true, analyzer: {class: StandardAnalyzer, matchVersion: LUCENE_43}, similarity: {class: BM25Similarity, b: 0.15}}");
+    put(o, "id", "{type: atom, store: true, postingsFormat: Memory}");
+    put(o, "price", "{type: float, sort: true, search: true, store: true}");
+    put(o, "date", "{type: atom, search: false, store: true}");
+    put(o, "dateFacet", "{type: atom, search: false, store: false, facet: hierarchy}");
+    put(o, "author", "{type: text, search: false, facet: flat, store: true, group: true}");
+    put(o, "charCount", "{type: int, store: true}");
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    send("registerFields", o2);
+  }
+
+  public void testUpdateDocument() throws Exception {
+    send("addDocument", "{fields: {body: 'here is a test', id: '0'}}");
+    long gen = getLong(send("updateDocument", "{term: {field: id, term: '0'}, fields: {body: 'here is another test', id: '0'}}"), "indexGen");
+    JSONObject o = send("search", "{queryText: 'body:test', searcher: {indexGen: " + gen + "}, retrieveFields: [body]}");
+    assertEquals(1, getInt(o, "totalHits"));
+    assertEquals("here is another test", getString(o, "hits[0].fields.body"));
+  }
+
+  public void testBulkUpdateDocuments() throws Exception {
+    deleteAllDocs();
+    StringBuilder sb = new StringBuilder();
+    sb.append("{\"indexName\": \"index\", \"documents\": [");
+    for(int i=0;i<100;i++) {
+      JSONObject o = new JSONObject();
+      o.put("body", "here is the body " + i);
+      o.put("id", ""+i);
+      if (i > 0) {
+        sb.append(',');
+      }
+      JSONObject o2 = new JSONObject();
+      o2.put("fields", o);
+      sb.append(o2.toString());
+    }
+    sb.append("]}");
+
+    String s = sb.toString();
+
+    JSONObject result = sendChunked(s, "bulkAddDocument");
+    assertEquals(100, result.get("indexedDocumentCount"));
+    long indexGen = ((Number) result.get("indexGen")).longValue();
+    assertEquals(1, getInt(send("search", "{queryText: 'body:99', searcher: {indexGen: " + indexGen + "}}"), "totalHits"));
+
+    // Now, update:
+    sb = new StringBuilder();
+    sb.append("{\"indexName\": \"index\", \"documents\": [");
+    for(int i=0;i<100;i++) {
+      JSONObject o2 = new JSONObject();
+      JSONObject o = new JSONObject();
+      o2.put("fields", o);
+      o.put("body", "here is the body " + i);
+      o.put("id", ""+i);
+      if (i > 0) {
+        sb.append(',');
+      }
+      put(o2, "term", "{field: id, term: '" + i + "'}");
+      sb.append(o2.toString());
+    }
+    sb.append("]}");
+
+    s = sb.toString();
+
+    result = sendChunked(s, "bulkUpdateDocument");
+    assertEquals(100, result.get("indexedDocumentCount"));
+    indexGen = ((Number) result.get("indexGen")).longValue();
+    assertEquals(1, getInt(send("search", "{queryText: 'body:99', searcher: {indexGen: " + indexGen + "}}"), "totalHits"));
+
+    assertEquals(100, getInt(send("search", "{query: MatchAllDocsQuery, searcher: {indexGen: " + indexGen + "}}"), "totalHits"));
+  }
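+  // Illustrative only: the hand-built bulk payloads above are shaped roughly like:
+  //   bulkAddDocument:    {"indexName": "index", "documents": [{"fields": {"body": "here is the body 0", "id": "0"}}, ...]}
+  //   bulkUpdateDocument: {"indexName": "index", "documents": [{"fields": {"body": "here is the body 0", "id": "0"}, "term": {"field": "id", "term": "0"}}, ...]}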
+
+  public void testBulkAddException() throws Exception {
+    deleteAllDocs();
+    StringBuilder sb = new StringBuilder();
+    sb.append("{\"indexName\": \"index\", \"documents\": [");
+    for(int i=0;i<100;i++) {
+      JSONObject o = new JSONObject();
+      o.put("body", "here is the body " + i);
+      o.put("id", ""+i);
+      if (i > 0) {
+        sb.append(',');
+      }
+      if (i == 57) {
+        o.put("foobar", 17);
+      }
+      JSONObject o2 = new JSONObject();
+      o2.put("fields", o);
+      sb.append(o2.toString());
+    }
+    sb.append("]}");
+
+    String s = sb.toString();
+
+    try {
+      sendChunked(s, "bulkAddDocument");
+      fail("did not hit expected exception");
+    } catch (IOException ioe) {
+      // expected
+    }
+  }
+
+  private JSONObject search(String query, long indexGen, String sortField, boolean reversed, boolean snippets, String groupField, String groupSortField) throws Exception {
+    JSONObject o = new JSONObject();
+    o.put("indexName", "index");
+    o.put("queryText", query);
+    if (indexGen != -1) {
+      JSONObject o2 = new JSONObject();
+      o.put("searcher", o2);
+      o2.put("indexGen", indexGen);
+    }
+
+    if (sortField != null) {
+      JSONObject sort = new JSONObject();
+      o.put("sort", sort);
+      sort.put("doDocScores", true);
+
+      JSONArray sortFields = new JSONArray();
+      sort.put("fields", sortFields);
+
+      JSONObject o2 = new JSONObject();
+      sortFields.add(o2);
+
+      o2.put("field", sortField);
+      o2.put("reverse", reversed);
+    }
+
+    if (groupField != null) {
+      String s = "{field: '" + groupField + "'";
+      if (groupSortField != null) {
+        s += ", sort: [{field: '" + groupSortField + "'}]";
+      }
+      s += "}";
+      put(o, "grouping", s);
+    }
+
+    put(o, "facets", "[{dim: dateFacet, topN: 10}]");
+    put(o, "retrieveFields", "[id, date, price, {field: body, highlight: " + (snippets ? "snippets" : "whole") + "}]");
+
+    return send("search", o);
+  }
+
+  public void testBulkAddDocument() throws Exception {
+    deleteAllDocs();
+    StringBuilder sb = new StringBuilder();
+    sb.append("{\"indexName\": \"index\", \"documents\": [");
+    for(int i=0;i<100;i++) {
+      JSONObject o = new JSONObject();
+      o.put("body", "here is the body " + i);
+      o.put("author", "Mr. " + i);
+      o.put("price", 15.66);
+      o.put("id", ""+i);
+      o.put("date", "01/01/2013");
+      if (i > 0) {
+        sb.append(",");
+      }
+      JSONObject o2 = new JSONObject();
+      o2.put("fields", o);
+      sb.append(o2.toString());
+    }
+    sb.append("]}");
+    String s = sb.toString();
+
+    JSONObject result = sendChunked(s, "bulkAddDocument");
+    assertEquals(100, result.get("indexedDocumentCount"));
+    long indexGen = getLong(result, "indexGen");
+    JSONObject r = search("99", indexGen, null, false, true, null, null);
+    assertEquals(1, ((Integer) r.get("totalHits")).intValue());
+  }
+
+  /** Make sure you get an error if you try to addDocument
+   *  after the index is stopped. */
+  public void testAddAfterStop() throws Exception {
+    deleteAllDocs();
+    send("stopIndex");
+    try {
+      send("addDocument", "{fields: {}}");
+      fail();
+    } catch (IOException ioe) {
+      // expected
+    }
+    send("startIndex");
+  }
+
+  public void testBoost() throws Exception {
+    TestUtil.rmDir(new File("boost"));
+    curIndexName = "boost";
+    send("createIndex");
+    send("settings", "{directory: RAMDirectory, matchVersion: LUCENE_40}");
+    // Just to test merge rate limiting:
+    send("settings", "{mergeMaxMBPerSec: 10.0}");
+    // Just to test index.ramBufferSizeMB:
+    send("liveSettings", "{index.ramBufferSizeMB: 20.0}");
+    send("registerFields", "{fields: {id: {type: atom, store: true}, body: {type: text, analyzer: StandardAnalyzer}}}");
+    send("startIndex");
+    send("addDocument", "{fields: {id: '0', body: 'here is a test'}}");
+    long gen = getLong(send("addDocument", "{fields: {id: '1', body: 'here is a test'}}"), "indexGen");
+    JSONObject result = send("search", String.format(Locale.ROOT, "{retrieveFields: [id], queryText: test, searcher: {indexGen: %d}}", gen));
+    assertEquals(2, getInt(result, "hits.length"));
+    // Unboosted, the hits come back in the order they were added:
+    assertEquals("0", getString(result, "hits[0].fields.id"));
+    assertEquals("1", getString(result, "hits[1].fields.id"));
+
+    // Do it again, this time setting higher boost for 2nd doc:
+    send("deleteAllDocuments");
+    send("addDocument", "{fields: {id: '0', body: 'here is a test'}}");
+    gen = getLong(send("addDocument", "{fields: {id: '1', body: {boost: 2.0, value: 'here is a test'}}}"), "indexGen");
+    result = send("search", String.format(Locale.ROOT, "{retrieveFields: [id], queryText: test, searcher: {indexGen: %d}}", gen));
+    assertEquals(2, getInt(result, "hits.length"));
+    // With the boost on the 2nd doc, it now comes back first:
+    assertEquals("1", getString(result, "hits[0].fields.id"));
+    assertEquals("0", getString(result, "hits[1].fields.id"));
+
+    send("deleteIndex");
+  }
+
+  public void testInvalidNormsFormat() throws Exception {
+    try {
+      send("settings", "{normsFormat: NoSuchNormsFormat}");
+      fail("did not hit exception");
+    } catch (IOException ioe) {
+      assertTrue(ioe.getMessage().contains("unrecognized value \"NoSuchNormsFormat\""));
+    }
+  }
+
+  public void testNormsFormat() throws Exception {
+    for(int i=0;i<2;i++) {
+      curIndexName = "normsFormat";
+      if (VERBOSE) {
+        System.out.println("\nTEST: createIndex");
+      }
+      send("createIndex");
+      String norms;
+      if (i == 0) {
+        norms = "normsFormat: Lucene42";
+      } else {
+        norms = "normsFormat: {class: Lucene42, acceptableOverheadRatio: 0.0}";
+      }
+      send("settings", "{directory: RAMDirectory, matchVersion: LUCENE_40, " + norms + "}");
+      send("registerFields",
+           "{fields: {id: {type: atom, store: true}," +
+           " body: {type: text, analyzer: StandardAnalyzer}}}");
+      if (VERBOSE) {
+        System.out.println("\nTEST: startIndex");
+      }
+      send("startIndex");
+      send("addDocument", "{fields: {id: '0', body: 'here is a test'}}");
+      long gen = getLong(send("addDocument", "{fields: {id: '1', body: 'here is a test again'}}"), "indexGen");
+      JSONObject result = send("search", String.format(Locale.ROOT, "{retrieveFields: [id], queryText: test, searcher: {indexGen: %d}}", gen));
+      assertEquals(2, getInt(result, "hits.length"));
+      assertEquals("0", getString(result, "hits[0].fields.id"));
+      assertEquals("1", getString(result, "hits[1].fields.id"));
+
+      if (VERBOSE) {
+        System.out.println("\nTEST: deleteIndex");
+      }
+      send("deleteIndex");
+    }
+  }
+
+  public void testOnlySettings() throws Exception {
+    for(int i=0;i<2;i++) {
+      curIndexName = "settings";
+      if (VERBOSE) {
+        System.out.println("\nTEST: create");
+      }
+      if (i == 0) {
+        send("createIndex");
+      } else {
+        File dir = new File(TestUtil.getTempDir("recency"), "root");
+        send("createIndex", "{rootDir: " + dir.getAbsolutePath() + "}");
+      }
+      String dirImpl = i == 0 ? "RAMDirectory" : "FSDirectory";
+
+      if (VERBOSE) {
+        System.out.println("\nTEST: settings1");
+      }
+      send("settings", "{directory: " + dirImpl + ", matchVersion: LUCENE_40}");
+      send("registerFields", "{fields: {id: {type: atom, store: true}}}");
+      //send("stopIndex");
+      if (VERBOSE) {
+        System.out.println("\nTEST: settings2");
+      }
+      JSONObject result = send("settings");
+      assertEquals(dirImpl, getString(result, "directory"));
+      if (i == 1) {
+        // With FSDir, the index & settings should survive a
+        // server bounce, even if the index wasn't ever started:
+
+        if (VERBOSE) {
+          System.out.println("\nTEST: bounce");
+        }
+
+        shutdownServer();
+        startServer();
+
+        if (VERBOSE) {
+          System.out.println("\nTEST: settings3");
+        }
+        result = send("settings");
+        assertEquals(dirImpl, getString(result, "directory"));
+      }
+      send("deleteIndex");
+    }
+  }
+
+  public void testIllegalRegisterFields() throws Exception {
+    // Cannot specify an analyzer with an atom field (it
+    // always uses KeywordAnalyzer):
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: atom, analyzer: WhitespaceAnalyzer}}}",
+                    "registerFields > fields > bad > analyzer: no analyzer allowed with atom (it's hardwired to KeywordAnalyzer internally)");
+
+    // Must specify an analyzer with a text field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: text}}}",
+                    "registerFields > fields > bad > indexAnalyzer: either analyzer or indexAnalyzer must be specified for an indexed text field");
+
+    // Must not specify an analyzer with a non-searched text field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: text, search: false, analyzer: WhitespaceAnalyzer}}}",
+                    "registerFields > fields > bad > analyzer: no analyzer allowed when search=false");
+
+    // Must not disable store if highlight is true:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: text, store: false, highlight: true, analyzer: WhitespaceAnalyzer}}}",
+                    "registerFields > fields > bad > store: store=false is not allowed when highlight=true");
+
+    // Cannot search a facet=hierarchy field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: atom, facet: hierarchy, search: true}}}",
+                    "registerFields > fields > bad > facet: facet=hierarchy fields cannot have search=true");
+
+    // Cannot store a facet=hierarchy field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: atom, facet: hierarchy, search: false, store: true}}}",
+                    "registerFields > fields > bad > facet: facet=hierarchy fields cannot have store=true");
+
+    // Cannot highlight a facet=hierarchy field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: atom, facet: hierarchy, highlight: true}}}",
+                    "registerFields > fields > bad > facet: facet=hierarchy fields cannot have highlight=true");
+
+    // Cannot create a pointless do-nothing field:
+    assertFailsWith("registerFields",
+                    "{fields: {bad: {type: atom, search: false, store: false}}}",
+                    "registerFields > fields > bad: field does nothing: it's neither searched, stored, sorted, grouped, highlighted nor faceted");
+  }
+}

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestLiveValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestLiveValues.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestLiveValues.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestLiveValues.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,67 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+
+public class TestLiveValues extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    send("registerFields", "{fields: {id: {type: atom, store: true, postingsFormat: Memory}}}");
+    send("registerFields", "{fields: {value: {type: atom, search: false, store: true, liveValues: id}}}");
+  }
+
+  // nocommit testDeletions
+
+  public void testLiveFields() throws Exception {
+    JSONArray arr = new JSONArray();
+    for(int i=0;i<100;i++) {
+      send("addDocument", "{fields: {id: '" + i + "', value: 'value is " + i + "'}}");
+      arr.add("" + i);
+    }
+    JSONObject request = new JSONObject();
+    request.put("indexName", "index");
+    request.put("ids", arr);
+    request.put("field", "value");
+    
+    JSONObject o = send("liveValues", request);
+    arr = (JSONArray) o.get("values");
+    assertEquals(100, arr.size());
+    for(int i=0;i<100;i++) {
+      assertEquals("value is " + i, arr.get(i));
+    }
+  }
+}
\ No newline at end of file

Added: lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestNumericFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestNumericFields.java?rev=1578133&view=auto
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestNumericFields.java (added)
+++ lucene/dev/branches/lucene5376_2/lucene/server/src/test/org/apache/lucene/server/TestNumericFields.java Sun Mar 16 18:11:07 2014
@@ -0,0 +1,63 @@
+package org.apache.lucene.server;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import net.minidev.json.JSONObject;
+
+public class TestNumericFields extends ServerBaseTestCase {
+
+  @BeforeClass
+  public static void initClass() throws Exception {
+    useDefaultIndex = true;
+    curIndexName = "index";
+    startServer();
+    createAndStartIndex();
+    registerFields();
+    commit();
+  }
+
+  @AfterClass
+  public static void fini() throws Exception {
+    shutdownServer();
+  }
+
+  private static void registerFields() throws Exception {
+    JSONObject o = new JSONObject();
+    put(o, "intNoSort", "{type: int, store: true}");
+    put(o, "intSort", "{type: int, sort: true, store: true}");
+    put(o, "floatNoSort", "{type: float, store: true}");
+    put(o, "floatSort", "{type: float, sort: true, store: true}");
+    JSONObject o2 = new JSONObject();
+    o2.put("fields", o);
+    o2.put("indexName", "index");
+    send("registerFields", o2);
+  }
+
+  public void testRetrieve() throws Exception {
+    deleteAllDocs();
+    long gen = getLong(send("addDocument", "{fields: {intNoSort: 17, intSort: 22, floatNoSort: 17.0, floatSort: 22.0}}"), "indexGen");
+    JSONObject result = send("search", "{retrieveFields: [intNoSort, intSort, floatNoSort, floatSort], query: MatchAllDocsQuery, searcher: {indexGen: " + gen + "}}");
+    assertEquals(1, getInt(result, "totalHits"));
+    assertEquals(17, getInt(result, "hits[0].fields.intNoSort"));
+    assertEquals(22, getInt(result, "hits[0].fields.intSort"));
+    assertEquals(17.0f, getFloat(result, "hits[0].fields.floatNoSort"), 1e-7);
+    assertEquals(22.0f, getFloat(result, "hits[0].fields.floatSort"), 1e-7);
+  }
+}