You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by rj...@apache.org on 2015/03/31 07:22:50 UTC
svn commit: r1670257 [33/39] - in /lucene/dev/branches/lucene6271: ./
dev-tools/ dev-tools/idea/.idea/libraries/ dev-tools/scripts/ lucene/
lucene/analysis/ lucene/analysis/common/
lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneou...
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java Tue Mar 31 05:22:40 2015
@@ -18,6 +18,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@@ -63,7 +64,7 @@ public class DistributedDebugComponentTe
@BeforeClass
public static void createThings() throws Exception {
solrHome = createSolrHome();
- createJetty(solrHome.getAbsolutePath(), null, null);
+ createJetty(solrHome.getAbsolutePath());
String url = jetty.getBaseUrl().toString();
collection1 = new HttpSolrClient(url + "/collection1");
@@ -263,7 +264,7 @@ public class DistributedDebugComponentTe
}
- private void verifyDebugSections(SolrQuery query, SolrClient client) throws SolrServerException {
+ private void verifyDebugSections(SolrQuery query, SolrClient client) throws SolrServerException, IOException {
query.set("debugQuery", "true");
query.remove("debug");
QueryResponse response = client.query(query);
@@ -343,7 +344,7 @@ public class DistributedDebugComponentTe
assertNull(response.getDebugMap());
}
- public void testCompareWithNonDistributedRequest() throws SolrServerException {
+ public void testCompareWithNonDistributedRequest() throws SolrServerException, IOException {
SolrQuery query = new SolrQuery();
query.setQuery("id:1");
query.setFilterQueries("id:[0 TO 10]");
@@ -375,7 +376,7 @@ public class DistributedDebugComponentTe
assertSameKeys((NamedList<?>)nonDistribResponse.getDebugMap().get("timing"), (NamedList<?>)distribResponse.getDebugMap().get("timing"));
}
- public void testTolerantSearch() throws SolrServerException {
+ public void testTolerantSearch() throws SolrServerException, IOException {
String badShard = "[ff01::0083]:3334";
SolrQuery query = new SolrQuery();
query.setQuery("*:*");
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java Tue Mar 31 05:22:40 2015
@@ -89,19 +89,23 @@ public class DistributedFacetPivotSmallA
handle.put("maxScore", SKIPVAL);
doTestDeepPivotStatsOnString();
- doTestTopStatsWithRefinement();
+
+ doTestTopStatsWithRefinement(true);
+ doTestTopStatsWithRefinement(false);
}
/**
* we need to ensure that stats never "overcount" the values from a single shard
* even if we hit that shard with a refinement request
*/
- private void doTestTopStatsWithRefinement() throws Exception {
-
-
+ private void doTestTopStatsWithRefinement(final boolean allStats) throws Exception {
+
+ String stat_param = allStats ?
+ "{!tag=s1}foo_i" : "{!tag=s1 min=true max=true count=true missing=true}foo_i";
+
ModifiableSolrParams coreParams = params("q", "*:*", "rows", "0",
"stats", "true",
- "stats.field", "{!tag=s1}foo_i" );
+ "stats.field", stat_param );
ModifiableSolrParams facetParams = new ModifiableSolrParams(coreParams);
facetParams.add(params("facet", "true",
"facet.limit", "1",
@@ -128,10 +132,18 @@ public class DistributedFacetPivotSmallA
assertEquals(msg, 91.0, fieldStatsInfo.getMax());
assertEquals(msg, 10, (long) fieldStatsInfo.getCount());
assertEquals(msg, 0, (long) fieldStatsInfo.getMissing());
- assertEquals(msg, 248.0, fieldStatsInfo.getSum());
- assertEquals(msg, 15294.0, fieldStatsInfo.getSumOfSquares(), 0.1E-7);
- assertEquals(msg, 24.8, (double) fieldStatsInfo.getMean(), 0.1E-7);
- assertEquals(msg, 31.87405772027709, fieldStatsInfo.getStddev(), 0.1E-7);
+
+ if (allStats) {
+ assertEquals(msg, 248.0, fieldStatsInfo.getSum());
+ assertEquals(msg, 15294.0, fieldStatsInfo.getSumOfSquares(), 0.1E-7);
+ assertEquals(msg, 24.8, (double) fieldStatsInfo.getMean(), 0.1E-7);
+ assertEquals(msg, 31.87405772027709, fieldStatsInfo.getStddev(), 0.1E-7);
+ } else {
+ assertNull(msg, fieldStatsInfo.getSum());
+ assertNull(msg, fieldStatsInfo.getSumOfSquares());
+ assertNull(msg, fieldStatsInfo.getMean());
+ assertNull(msg, fieldStatsInfo.getStddev());
+ }
if (params.getBool("facet", false)) {
// if this was a facet request, then the top pivot constraint and pivot
@@ -156,6 +168,12 @@ public class DistributedFacetPivotSmallA
assertEquals(4, (long) dublinMicrosoftStatsInfo.getCount());
assertEquals(0, (long) dublinMicrosoftStatsInfo.getMissing());
+ if (! allStats) {
+ assertNull(msg, dublinMicrosoftStatsInfo.getSum());
+ assertNull(msg, dublinMicrosoftStatsInfo.getSumOfSquares());
+ assertNull(msg, dublinMicrosoftStatsInfo.getMean());
+ assertNull(msg, dublinMicrosoftStatsInfo.getStddev());
+ }
}
}
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java Tue Mar 31 05:22:40 2015
@@ -334,16 +334,21 @@ public class DistributedFacetPivotSmallT
}
}
- doTestDeepPivotStats();
+ doTestDeepPivotStats(false); // all price stats
+ doTestDeepPivotStats(true); // just the mean price stat
doTestPivotStatsFromOneShard();
}
- private void doTestDeepPivotStats() throws Exception {
+ /**
+ * @param justMean - only the mean stat is requested/computed
+ */
+ private void doTestDeepPivotStats(boolean justMean) throws Exception {
SolrParams params = params("q", "*:*", "rows", "0",
"facet", "true", "stats", "true",
"facet.pivot", "{!stats=s1}place_t,company_t",
- "stats.field", "{!key=avg_price tag=s1}price_ti");
+ "stats.field", ("{!key=avg_price tag=s1 "+
+ (justMean ? "mean=true" : "") +"}price_ti"));
QueryResponse rsp = query(params);
List<PivotField> placePivots = rsp.getFacetPivot().get("place_t,company_t");
@@ -357,15 +362,24 @@ public class DistributedFacetPivotSmallT
assertEquals(4, microsoftPivotField.getCount());
FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price");
- assertEquals(15.0, dublinMicrosoftStatsInfo.getMin());
- assertEquals(29.0, dublinMicrosoftStatsInfo.getMax());
- assertEquals(3, (long) dublinMicrosoftStatsInfo.getCount());
- assertEquals(1, (long) dublinMicrosoftStatsInfo.getMissing());
- assertEquals(63.0, dublinMicrosoftStatsInfo.getSum());
- assertEquals(1427.0, dublinMicrosoftStatsInfo.getSumOfSquares(), 0.1E-7);
assertEquals(21.0, (double) dublinMicrosoftStatsInfo.getMean(), 0.1E-7);
- assertEquals(7.211102550927978, dublinMicrosoftStatsInfo.getStddev(), 0.1E-7);
-
+ if (justMean) {
+ assertNull(dublinMicrosoftStatsInfo.getMin());
+ assertNull(dublinMicrosoftStatsInfo.getMax());
+ assertNull(dublinMicrosoftStatsInfo.getCount());
+ assertNull(dublinMicrosoftStatsInfo.getMissing());
+ assertNull(dublinMicrosoftStatsInfo.getSum());
+ assertNull(dublinMicrosoftStatsInfo.getSumOfSquares());
+ assertNull(dublinMicrosoftStatsInfo.getStddev());
+ } else {
+ assertEquals(15.0, dublinMicrosoftStatsInfo.getMin());
+ assertEquals(29.0, dublinMicrosoftStatsInfo.getMax());
+ assertEquals(3, (long) dublinMicrosoftStatsInfo.getCount());
+ assertEquals(1, (long) dublinMicrosoftStatsInfo.getMissing());
+ assertEquals(63.0, dublinMicrosoftStatsInfo.getSum());
+ assertEquals(1427.0, dublinMicrosoftStatsInfo.getSumOfSquares(), 0.1E-7);
+ assertEquals(7.211102550927978, dublinMicrosoftStatsInfo.getStddev(), 0.1E-7);
+ }
PivotField cardiffPivotField = placePivots.get(2);
assertEquals("cardiff", cardiffPivotField.getValue());
@@ -376,15 +390,24 @@ public class DistributedFacetPivotSmallT
assertEquals(3, polecatPivotField.getCount());
FieldStatsInfo cardiffPolecatStatsInfo = polecatPivotField.getFieldStatsInfo().get("avg_price");
- assertEquals(15.0, cardiffPolecatStatsInfo.getMin());
- assertEquals(39.0, cardiffPolecatStatsInfo.getMax());
- assertEquals(2, (long) cardiffPolecatStatsInfo.getCount());
- assertEquals(1, (long) cardiffPolecatStatsInfo.getMissing());
- assertEquals(54.0, cardiffPolecatStatsInfo.getSum());
- assertEquals(1746.0, cardiffPolecatStatsInfo.getSumOfSquares(), 0.1E-7);
assertEquals(27.0, (double) cardiffPolecatStatsInfo.getMean(), 0.1E-7);
- assertEquals(16.97056274847714, cardiffPolecatStatsInfo.getStddev(), 0.1E-7);
-
+ if (justMean) {
+ assertNull(cardiffPolecatStatsInfo.getMin());
+ assertNull(cardiffPolecatStatsInfo.getMax());
+ assertNull(cardiffPolecatStatsInfo.getCount());
+ assertNull(cardiffPolecatStatsInfo.getMissing());
+ assertNull(cardiffPolecatStatsInfo.getSum());
+ assertNull(cardiffPolecatStatsInfo.getSumOfSquares());
+ assertNull(cardiffPolecatStatsInfo.getStddev());
+ } else {
+ assertEquals(15.0, cardiffPolecatStatsInfo.getMin());
+ assertEquals(39.0, cardiffPolecatStatsInfo.getMax());
+ assertEquals(2, (long) cardiffPolecatStatsInfo.getCount());
+ assertEquals(1, (long) cardiffPolecatStatsInfo.getMissing());
+ assertEquals(54.0, cardiffPolecatStatsInfo.getSum());
+ assertEquals(1746.0, cardiffPolecatStatsInfo.getSumOfSquares(), 0.1E-7);
+ assertEquals(16.97056274847714, cardiffPolecatStatsInfo.getStddev(), 0.1E-7);
+ }
PivotField krakowPivotField = placePivots.get(3);
assertEquals("krakow", krakowPivotField.getValue());
@@ -395,14 +418,25 @@ public class DistributedFacetPivotSmallT
assertEquals(1, fujitsuPivotField.getCount());
FieldStatsInfo krakowFujitsuStatsInfo = fujitsuPivotField.getFieldStatsInfo().get("avg_price");
- assertEquals(null, krakowFujitsuStatsInfo.getMin());
- assertEquals(null, krakowFujitsuStatsInfo.getMax());
- assertEquals(0, (long) krakowFujitsuStatsInfo.getCount());
- assertEquals(1, (long) krakowFujitsuStatsInfo.getMissing());
- assertEquals(0.0, krakowFujitsuStatsInfo.getSum());
- assertEquals(0.0, krakowFujitsuStatsInfo.getSumOfSquares(), 0.1E-7);
assertEquals(Double.NaN, (double) krakowFujitsuStatsInfo.getMean(), 0.1E-7);
- assertEquals(0.0, krakowFujitsuStatsInfo.getStddev(), 0.1E-7);
+ if (justMean) {
+ assertNull(krakowFujitsuStatsInfo.getMin());
+ assertNull(krakowFujitsuStatsInfo.getMax());
+ assertNull(krakowFujitsuStatsInfo.getCount());
+ assertNull(krakowFujitsuStatsInfo.getMissing());
+ assertNull(krakowFujitsuStatsInfo.getSum());
+ assertNull(krakowFujitsuStatsInfo.getSumOfSquares());
+ assertNull(krakowFujitsuStatsInfo.getStddev());
+ } else {
+ assertEquals(null, krakowFujitsuStatsInfo.getMin());
+ assertEquals(null, krakowFujitsuStatsInfo.getMax());
+ assertEquals(0, (long) krakowFujitsuStatsInfo.getCount());
+ assertEquals(1, (long) krakowFujitsuStatsInfo.getMissing());
+ assertEquals(0.0, krakowFujitsuStatsInfo.getSum());
+ assertEquals(0.0, krakowFujitsuStatsInfo.getSumOfSquares(), 0.1E-7);
+ assertEquals(Double.NaN, (double) krakowFujitsuStatsInfo.getMean(), 0.1E-7);
+ assertEquals(0.0, krakowFujitsuStatsInfo.getStddev(), 0.1E-7);
+ }
}
// Useful to check for errors, orders lists and does toString() equality check
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java Tue Mar 31 05:22:40 2015
@@ -17,17 +17,19 @@ package org.apache.solr.handler.componen
* limitations under the License.
*/
-import org.apache.solr.BaseDistributedSearchTestCase;
-import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.SimpleOrderedMap;
-import org.junit.BeforeClass;
+import org.apache.solr.common.util.StrUtils;
import org.junit.Test;
import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
/**
* Test for QueryComponent's distributed querying optimization.
@@ -37,37 +39,39 @@ import java.util.Map;
*
* @see QueryComponent
*/
-public class DistributedQueryComponentOptimizationTest extends BaseDistributedSearchTestCase {
+public class DistributedQueryComponentOptimizationTest extends AbstractFullDistribZkTestBase {
public DistributedQueryComponentOptimizationTest() {
stress = 0;
+ schemaString = "schema-custom-field.xml";
}
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- initCore("solrconfig.xml", "schema-custom-field.xml");
+ @Override
+ protected String getSolrXml() {
+ return "solr-trackingshardhandler.xml";
}
@Test
@ShardsFixed(num = 3)
public void test() throws Exception {
+ waitForThingsToLevelOut(30);
del("*:*");
- index(id, "1", "text", "a", "test_sS", "21", "payload", ByteBuffer.wrap(new byte[] { 0x12, 0x62, 0x15 }), // 2
- // quick check to prove "*" dynamicField hasn't been broken by somebody mucking with schema
- "asdfasdf_field_should_match_catchall_dynamic_field_adsfasdf", "value");
- index(id, "2", "text", "b", "test_sS", "22", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x16 })); // 5
- index(id, "3", "text", "a", "test_sS", "23", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x32, 0x58 })); // 8
- index(id, "4", "text", "b", "test_sS", "24", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x15 })); // 4
- index(id, "5", "text", "a", "test_sS", "25", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x35, 0x10, 0x00 })); // 9
- index(id, "6", "text", "c", "test_sS", "26", "payload", ByteBuffer.wrap(new byte[] { 0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03 })); // 3
- index(id, "7", "text", "c", "test_sS", "27", "payload", ByteBuffer.wrap(new byte[] { 0x00, 0x3c, 0x73 })); // 1
- index(id, "8", "text", "c", "test_sS", "28", "payload", ByteBuffer.wrap(new byte[] { 0x59, 0x2d, 0x4d })); // 11
- index(id, "9", "text", "a", "test_sS", "29", "payload", ByteBuffer.wrap(new byte[] { 0x39, 0x79, 0x7a })); // 10
- index(id, "10", "text", "b", "test_sS", "30", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })); // 6
- index(id, "11", "text", "d", "test_sS", "31", "payload", ByteBuffer.wrap(new byte[] { (byte)0xff, (byte)0xaf, (byte)0x9c })); // 13
- index(id, "12", "text", "d", "test_sS", "32", "payload", ByteBuffer.wrap(new byte[] { 0x34, (byte)0xdd, 0x4d })); // 7
- index(id, "13", "text", "d", "test_sS", "33", "payload", ByteBuffer.wrap(new byte[] { (byte)0x80, 0x11, 0x33 })); // 12
+ index(id, "1", "text", "a", "test_sS", "21", "payload", ByteBuffer.wrap(new byte[]{0x12, 0x62, 0x15}), // 2
+ // quick check to prove "*" dynamicField hasn't been broken by somebody mucking with schema
+ "asdfasdf_field_should_match_catchall_dynamic_field_adsfasdf", "value");
+ index(id, "2", "text", "b", "test_sS", "22", "payload", ByteBuffer.wrap(new byte[]{0x25, 0x21, 0x16})); // 5
+ index(id, "3", "text", "a", "test_sS", "23", "payload", ByteBuffer.wrap(new byte[]{0x35, 0x32, 0x58})); // 8
+ index(id, "4", "text", "b", "test_sS", "24", "payload", ByteBuffer.wrap(new byte[]{0x25, 0x21, 0x15})); // 4
+ index(id, "5", "text", "a", "test_sS", "25", "payload", ByteBuffer.wrap(new byte[]{0x35, 0x35, 0x10, 0x00})); // 9
+ index(id, "6", "text", "c", "test_sS", "26", "payload", ByteBuffer.wrap(new byte[]{0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03})); // 3
+ index(id, "7", "text", "c", "test_sS", "27", "payload", ByteBuffer.wrap(new byte[]{0x00, 0x3c, 0x73})); // 1
+ index(id, "8", "text", "c", "test_sS", "28", "payload", ByteBuffer.wrap(new byte[]{0x59, 0x2d, 0x4d})); // 11
+ index(id, "9", "text", "a", "test_sS", "29", "payload", ByteBuffer.wrap(new byte[]{0x39, 0x79, 0x7a})); // 10
+ index(id, "10", "text", "b", "test_sS", "30", "payload", ByteBuffer.wrap(new byte[]{0x31, 0x39, 0x7c})); // 6
+ index(id, "11", "text", "d", "test_sS", "31", "payload", ByteBuffer.wrap(new byte[]{(byte) 0xff, (byte) 0xaf, (byte) 0x9c})); // 13
+ index(id, "12", "text", "d", "test_sS", "32", "payload", ByteBuffer.wrap(new byte[]{0x34, (byte) 0xdd, 0x4d})); // 7
+ index(id, "13", "text", "d", "test_sS", "33", "payload", ByteBuffer.wrap(new byte[]{(byte) 0x80, 0x11, 0x33})); // 12
commit();
QueryResponse rsp;
@@ -95,27 +99,27 @@ public class DistributedQueryComponentOp
compareResponses(rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing
// verify that the optimization actually works
- verifySinglePass("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20"); // id only is optimized by default
- verifySinglePass("q", "*:*", "fl", "id,score", "sort", "payload desc", "rows", "20"); // id,score only is optimized by default
- verifySinglePass("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true");
+ queryWithAsserts("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20"); // id only is optimized by default
+ queryWithAsserts("q", "*:*", "fl", "id,score", "sort", "payload desc", "rows", "20"); // id,score only is optimized by default
+ queryWithAsserts("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true");
// SOLR-6545, wild card field list
- index(id, "19", "text", "d", "cat_a_sS", "1" ,"dynamic", "2", "payload", ByteBuffer.wrap(new byte[] { (byte)0x80, 0x11, 0x33 }));
+ index(id, "19", "text", "d", "cat_a_sS", "1", "dynamic", "2", "payload", ByteBuffer.wrap(new byte[]{(byte) 0x80, 0x11, 0x34}));
commit();
- nonDistribRsp = query("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc");
- rsp = query("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true");
+ nonDistribRsp = queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc");
+ rsp = queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true");
assertFieldValues(nonDistribRsp.getResults(), "id", 19);
assertFieldValues(rsp.getResults(), "id", 19);
- nonDistribRsp = query("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc");
- rsp = query("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc", "distrib.singlePass", "true");
+ nonDistribRsp = queryWithAsserts("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc");
+ rsp = queryWithAsserts("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc", "distrib.singlePass", "true");
assertFieldValues(nonDistribRsp.getResults(), "id", 19);
assertFieldValues(rsp.getResults(), "id", 19);
- verifySinglePass("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true");
- verifySinglePass("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc", "distrib.singlePass", "true");
+ queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true");
+ queryWithAsserts("q", "id:19", "fl", "id,dynamic,cat*", "sort", "payload asc", "distrib.singlePass", "true");
// see SOLR-6795, distrib.singlePass=true would return score even when not asked for
handle.clear();
@@ -123,24 +127,168 @@ public class DistributedQueryComponentOp
handle.put("_version_", SKIPVAL);
// we don't want to compare maxScore because most distributed requests return it anyway (just because they have score already)
handle.put("maxScore", SKIPVAL);
+ // this trips the queryWithAsserts function because it uses a custom parser, so just query directly
query("q", "{!func}id", ShardParams.DISTRIB_SINGLE_PASS, "true");
// fix for a bug where not all fields are returned if using multiple fl parameters, see SOLR-6796
- query("q","*:*", "fl", "id", "fl","dynamic","sort","payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true");
+ queryWithAsserts("q", "*:*", "fl", "id", "fl", "dynamic", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true");
+
+ // missing fl with sort
+ queryWithAsserts("q", "*:*", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true");
+ queryWithAsserts("q", "*:*", "sort", "payload desc");
+
+ // fl=*
+ queryWithAsserts("q", "*:*", "fl", "*", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true");
+ queryWithAsserts("q", "*:*", "fl", "*", "sort", "payload desc");
+
+ // fl=*,score
+ queryWithAsserts("q", "*:*", "fl", "*,score", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true");
+ queryWithAsserts("q", "*:*", "fl", "*,score", "sort", "payload desc");
}
- private void verifySinglePass(String... q) throws SolrServerException {
- QueryResponse rsp;ModifiableSolrParams params = new ModifiableSolrParams();
+ /**
+ * This test now asserts that every distrib.singlePass query:
+ * <ol>
+ * <li>Makes exactly 'numSlices' number of shard requests</li>
+ * <li>Makes no GET_FIELDS requests</li>
+ * <li>Must request the unique key field from shards</li>
+ * <li>Must request the score if 'fl' has score or sort by score is requested</li>
+ * <li>Requests all fields that are present in 'fl' param</li>
+ * </ol>
+ * <p>
+ * It also asserts that every regular two phase distributed search:
+ * <ol>
+ * <li>Makes at most 2 * 'numSlices' number of shard requests</li>
+ * <li>Must request the unique key field from shards</li>
+ * <li>Must request the score if 'fl' has score or sort by score is requested</li>
+ * <li>Requests no fields other than id and score in GET_TOP_IDS request</li>
+ * <li>Requests exactly the fields that are present in 'fl' param in GET_FIELDS request and no others</li>
+ * </ol>
+ * <p>
+ * and also asserts that each query which requests id or score or both behaves exactly like a single pass query
+ */
+ private QueryResponse queryWithAsserts(Object... q) throws Exception {
+ TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue = new TrackingShardHandlerFactory.RequestTrackingQueue();
+ // the jettys list doesn't include the control jetty, which is exactly what we need here
+ TrackingShardHandlerFactory.setTrackingQueue(jettys, trackingQueue);
+
+ // let's add debug=track to such requests so we can use DebugComponent responses for assertions
+ Object[] qq = new Object[q.length + 2];
+ System.arraycopy(q, 0, qq, 0, q.length);
+ qq[qq.length - 2] = "debug";
+ qq[qq.length - 1] = "track";
+ handle.put("debug", SKIPVAL);
+ QueryResponse response = query(qq);
+
+ Map<String, List<TrackingShardHandlerFactory.ShardRequestAndParams>> requests = trackingQueue.getAllRequests();
+ int numRequests = getNumRequests(requests);
+
+ boolean distribSinglePass = false;
+
+ Set<String> fls = new HashSet<>();
+ Set<String> sortFields = new HashSet<>();
for (int i = 0; i < q.length; i += 2) {
- params.add(q[i].toString(), q[i + 1].toString());
+ if (ShardParams.DISTRIB_SINGLE_PASS.equals(q[i].toString()) && Boolean.parseBoolean(q[i + 1].toString())) {
+ assertTrue("distrib.singlePass=true made more requests than number of shards",
+ numRequests == sliceCount);
+ distribSinglePass = true;
+ }
+ if (CommonParams.FL.equals(q[i].toString())) {
+ fls.addAll(StrUtils.splitSmart(q[i + 1].toString(), ','));
+ }
+ if (CommonParams.SORT.equals(q[i].toString())) {
+ String val = q[i + 1].toString().trim();
+ // take care of asc/desc decorators
+ sortFields.addAll(StrUtils.splitSmart(StrUtils.splitSmart(val, ' ').get(0), ','));
+ }
+ }
+
+ Set<String> idScoreFields = new HashSet<>(2);
+ idScoreFields.add("id"); // id is always requested in GET_TOP_IDS phase
+ // score is optional, requested only if sorted by score
+ if (fls.contains("score") || sortFields.contains("score")) idScoreFields.add("score");
+
+ if (idScoreFields.containsAll(fls) && !fls.isEmpty()) {
+ // if id and/or score are the only fields being requested then we implicitly turn on distribSinglePass=true
+ distribSinglePass = true;
+ }
+
+ if (distribSinglePass) {
+ Map<String, Object> debugMap = response.getDebugMap();
+ SimpleOrderedMap<Object> track = (SimpleOrderedMap<Object>) debugMap.get("track");
+ assertNotNull(track);
+ assertNotNull(track.get("EXECUTE_QUERY"));
+ assertNull("A single pass request should not have a GET_FIELDS phase", track.get("GET_FIELDS"));
+
+ // all fields should be requested in one go but even if 'id' is not requested by user
+ // it must still be fetched in this phase to merge correctly
+ Set<String> reqAndIdScoreFields = new HashSet<>(fls);
+ reqAndIdScoreFields.addAll(idScoreFields);
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD1,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()]));
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD2,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()]));
+ } else {
+ // we are assuming there are facet refinement or distributed idf requests here
+ assertTrue("distrib.singlePass=false made more requests than 2 * number of shards." +
+ " Actual: " + numRequests + " but expected <= " + sliceCount * 2,
+ numRequests <= sliceCount * 2);
+
+ // only id and/or score should be requested
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD1,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, idScoreFields.toArray(new String[idScoreFields.size()]));
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD2,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, idScoreFields.toArray(new String[idScoreFields.size()]));
+
+ // only originally requested fields must be requested in GET_FIELDS request
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD1,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_FIELDS, fls.toArray(new String[fls.size()]));
+ assertParamsEquals(trackingQueue, DEFAULT_COLLECTION, SHARD2,
+ CommonParams.FL, ShardRequest.PURPOSE_GET_FIELDS, fls.toArray(new String[fls.size()]));
+ }
+
+ return response;
+ }
+
+ private int getNumRequests(Map<String, List<TrackingShardHandlerFactory.ShardRequestAndParams>> requests) {
+ int beforeNumRequests = 0;
+ for (Map.Entry<String, List<TrackingShardHandlerFactory.ShardRequestAndParams>> entry : requests.entrySet()) {
+ beforeNumRequests += entry.getValue().size();
+ }
+ return beforeNumRequests;
+ }
+
+ private void assertParamsEquals(TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue, String collection, String shard, String paramName, int purpose, String... values) {
+ TrackingShardHandlerFactory.ShardRequestAndParams getByIdRequest = trackingQueue.getShardRequestByPurpose(cloudClient.getZkStateReader(), collection, shard, purpose);
+ assertParamsEquals(getByIdRequest, paramName, values);
+ }
+
+ private void assertParamsEquals(TrackingShardHandlerFactory.ShardRequestAndParams requestAndParams, String paramName, String... values) {
+ if (requestAndParams == null) return;
+ int expectedCount = values.length;
+ String[] params = requestAndParams.params.getParams(paramName);
+ if (expectedCount > 0 && (params == null || params.length == 0)) {
+ fail("Expected non-zero number of '" + paramName + "' parameters in request");
+ }
+ Set<String> requestedFields = new HashSet<>();
+ if (params != null) {
+ for (String p : params) {
+ List<String> list = StrUtils.splitSmart(p, ',');
+ for (String s : list) {
+ // make sure field names aren't duplicated in the parameters
+ assertTrue("Field name " + s + " was requested multiple times: params = " + requestAndParams.params,
+ requestedFields.add(s));
+ }
+ }
+ }
+ // if a wildcard ALL field is requested then we don't need to match exact number of params
+ if (!requestedFields.contains("*")) {
+ assertEquals("Number of requested fields do not match with expectations", expectedCount, requestedFields.size());
+ for (String field : values) {
+ if (!requestedFields.contains(field)) {
+ fail("Field " + field + " not found in param: " + paramName + " request had " + paramName + "=" + requestedFields);
+ }
+ }
}
- params.add("shards", getShardsString());
- params.add("debug", "track");
- rsp = queryServer(new ModifiableSolrParams(params));
- Map<String, Object> debugMap = rsp.getDebugMap();
- SimpleOrderedMap<Object> track = (SimpleOrderedMap<Object>) debugMap.get("track");
- assertNotNull(track);
- assertNotNull(track.get("EXECUTE_QUERY"));
- assertNull("A single pass request should not have a GET_FIELDS phase", track.get("GET_FIELDS"));
}
}
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java Tue Mar 31 05:22:40 2015
@@ -158,7 +158,7 @@ public class FacetPivotSmallTest extends
params.add("facet", "true");
params.add("facet.pivot", "{!stats=s1}place_t,company_t");
params.add("stats", "true");
- params.add("stats.field", "{!key=avg_price tag=s1 mean=true}price_ti");
+ params.add("stats.field", "{!key=avg_price tag=s1}price_ti");
SolrQueryRequest req = req(params);
final String statsPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst";
@@ -174,6 +174,8 @@ public class FacetPivotSmallTest extends
dublinMicrosoftStats + "/double[@name='sumOfSquares'][.=1427.0]",
dublinMicrosoftStats + "/double[@name='mean'][.=21.0]",
dublinMicrosoftStats + "/double[@name='stddev'][.=7.211102550927978]",
+ // if new stats are supported, this will break - update test to assert values for each
+ "count(" + dublinMicrosoftStats + "/*)=8",
cardiffPolecatStats + "/double[@name='min'][.=15.0]",
cardiffPolecatStats + "/double[@name='max'][.=39.0]",
@@ -183,6 +185,8 @@ public class FacetPivotSmallTest extends
cardiffPolecatStats + "/double[@name='sumOfSquares'][.=1746.0]",
cardiffPolecatStats + "/double[@name='mean'][.=27.0]",
cardiffPolecatStats + "/double[@name='stddev'][.=16.97056274847714]",
+ // if new stats are supported, this will break - update test to assert values for each
+ "count(" + cardiffPolecatStats + "/*)=8",
krakowFujitsuStats + "/null[@name='min']",
krakowFujitsuStats + "/null[@name='max']",
@@ -191,7 +195,10 @@ public class FacetPivotSmallTest extends
krakowFujitsuStats + "/double[@name='sum'][.=0.0]",
krakowFujitsuStats + "/double[@name='sumOfSquares'][.=0.0]",
krakowFujitsuStats + "/double[@name='mean'][.='NaN']",
- krakowFujitsuStats + "/double[@name='stddev'][.=0.0]"
+ krakowFujitsuStats + "/double[@name='stddev'][.=0.0]",
+ // if new stats are supported, this will break - update test to assert values for each
+ "count(" + krakowFujitsuStats + "/*)=8"
+
);
}
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java Tue Mar 31 05:22:40 2015
@@ -534,7 +534,7 @@ public class QueryElevationComponentTest
String query = "title:ipod";
- Map<String, String> args = new HashMap<>();
+ Map<String, String> args = new HashMap<>(); // reusing args & requests this way is a solr-test-antipattern. PLEASE DO NOT COPY THIS CODE
args.put(CommonParams.Q, query);
args.put(CommonParams.QT, "/elevate");
args.put(CommonParams.FL, "id,score");
@@ -556,6 +556,7 @@ public class QueryElevationComponentTest
booster.setTopQueryResults(reader, query, new String[]{"x", "y", "z"}, null);
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ("All six should make it", req
, "//*[@numFound='6']"
, "//result/doc[1]/str[@name='id'][.='x']"
@@ -569,6 +570,7 @@ public class QueryElevationComponentTest
booster.elevationCache.clear();
// now switch the order:
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
booster.setTopQueryResults(reader, query, new String[]{"a", "x"}, null);
assertQ("All four should make it", req
, "//*[@numFound='4']"
@@ -580,6 +582,7 @@ public class QueryElevationComponentTest
// Test reverse sort
args.put(CommonParams.SORT, "score asc");
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ("All four should make it", req
, "//*[@numFound='4']"
, "//result/doc[4]/str[@name='id'][.='a']"
@@ -592,6 +595,7 @@ public class QueryElevationComponentTest
// default 'forceBoost' should be false
assertEquals(false, booster.forceElevation);
args.put(CommonParams.SORT, "str_s1 asc");
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ(null, req
, "//*[@numFound='4']"
, "//result/doc[1]/str[@name='id'][.='a']"
@@ -600,6 +604,7 @@ public class QueryElevationComponentTest
, "//result/doc[4]/str[@name='id'][.='x']"
);
args.put(CommonParams.SORT, "id asc");
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ(null, req
, "//*[@numFound='4']"
, "//result/doc[1]/str[@name='id'][.='a']"
@@ -610,6 +615,7 @@ public class QueryElevationComponentTest
booster.forceElevation = true;
args.put(CommonParams.SORT, "id asc");
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ(null, req
, "//*[@numFound='4']"
, "//result/doc[1]/str[@name='id'][.='a']"
@@ -620,6 +626,7 @@ public class QueryElevationComponentTest
//Test exclusive (not to be confused with exclusion)
args.put(QueryElevationParams.EXCLUSIVE, "true");
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
booster.setTopQueryResults(reader, query, new String[]{"x", "a"}, new String[]{});
assertQ(null, req
, "//*[@numFound='2']"
@@ -631,6 +638,7 @@ public class QueryElevationComponentTest
booster.elevationCache.clear();
args.remove(CommonParams.SORT);
args.remove(QueryElevationParams.EXCLUSIVE);
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
booster.setTopQueryResults(reader, query, new String[]{"x"}, new String[]{"a"});
assertQ(null, req
, "//*[@numFound='3']"
@@ -645,7 +653,7 @@ public class QueryElevationComponentTest
booster.elevationCache.clear();
args.put(QueryElevationParams.IDS, "x,y,z");
args.put(QueryElevationParams.EXCLUDE, "b");
-
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ("All five should make it", req
, "//*[@numFound='5']"
, "//result/doc[1]/str[@name='id'][.='x']"
@@ -657,7 +665,7 @@ public class QueryElevationComponentTest
args.put(QueryElevationParams.IDS, "x,z,y");
args.put(QueryElevationParams.EXCLUDE, "b,c");
-
+ req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args));
assertQ("All four should make it", req
, "//*[@numFound='4']"
, "//result/doc[1]/str[@name='id'][.='x']"
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java Tue Mar 31 05:22:40 2015
@@ -180,7 +180,11 @@ public class SpellCheckComponentTest ext
request = req("qt", "spellCheckCompRH", "q", "*:*", "spellcheck.q", "ttle",
"spellcheck", "true", "spellcheck.dictionary", "default",
"spellcheck.reload", "true");
- ResponseBuilder rb = new ResponseBuilder(request, new SolrQueryResponse(), new ArrayList(h.getCore().getSearchComponents().values()));
+ List<SearchComponent> components = new ArrayList<>();
+ for (String name : h.getCore().getSearchComponents().keySet()) {
+ components.add(h.getCore().getSearchComponent(name));
+ }
+ ResponseBuilder rb = new ResponseBuilder(request, new SolrQueryResponse(), components);
checker.prepare(rb);
try {
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java Tue Mar 31 05:22:40 2015
@@ -16,11 +16,14 @@ package org.apache.solr.handler.componen
* limitations under the License.
*/
+import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
+import java.util.Iterator;
+import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
@@ -29,21 +32,26 @@ import java.util.TimeZone;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.queries.function.valuesource.QueryValueSource;
-
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.StatsParams;
+import org.apache.solr.common.util.Base64;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.component.StatsField.Stat;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.util.AbstractSolrTestCase;
-import org.junit.BeforeClass;
+import org.apache.commons.math3.util.Combinations;
+import com.tdunning.math.stats.AVLTreeDigest;
+
+import org.junit.BeforeClass;
/**
* Statistics Component Test
@@ -647,8 +655,20 @@ public class StatsComponentTest extends
args.put("indent", "true");
SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
- assertQ("test string statistics values", req,
- "//null[@name='active_i'][.='']");
+ assertQ("test string statistics values", req
+ ,"//lst[@name='active_i']/long[@name='count'][.='0']"
+ ,"//lst[@name='active_i']/long[@name='missing'][.='4']"
+
+ ,"//lst[@name='active_i']/null[@name='min']"
+ ,"//lst[@name='active_i']/null[@name='max']"
+ ,"//lst[@name='active_i']/double[@name='sum'][.='0.0']"
+ ,"//lst[@name='active_i']/double[@name='sumOfSquares'][.='0.0']"
+ ,"//lst[@name='active_i']/double[@name='stddev'][.='0.0']"
+ ,"//lst[@name='active_i']/double[@name='mean'][.='NaN']"
+ // if new stats are supported, this will break - update test to assert values for each
+ ,"count(//lst[@name='active_i']/*)=8"
+
+ );
}
public void testFieldStatisticsResultsStringFieldAlwaysMissing() throws Exception {
@@ -667,8 +687,15 @@ public class StatsComponentTest extends
args.put("indent", "true");
SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
- assertQ("test string statistics values", req,
- "//null[@name='active_s'][.='']");
+ assertQ("test string statistics values", req
+ ,"//lst[@name='active_s']/long[@name='count'][.='0']"
+ ,"//lst[@name='active_s']/long[@name='missing'][.='4']"
+
+ ,"//lst[@name='active_s']/null[@name='min']"
+ ,"//lst[@name='active_s']/null[@name='max']"
+ // if new stats are supported, this will break - update test to assert values for each
+ ,"count(//lst[@name='active_s']/*)=4"
+ );
}
//SOLR-3160
@@ -688,8 +715,20 @@ public class StatsComponentTest extends
args.put("indent", "true");
SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
- assertQ("test string statistics values", req,
- "//null[@name='active_dt'][.='']");
+ assertQ("test string statistics values", req
+ ,"//lst[@name='active_dt']/long[@name='count'][.='0']"
+ ,"//lst[@name='active_dt']/long[@name='missing'][.='3']"
+
+ ,"//lst[@name='active_dt']/null[@name='min']"
+ ,"//lst[@name='active_dt']/null[@name='max']"
+ ,"//lst[@name='active_dt']/null[@name='mean']"
+ ,"//lst[@name='active_dt']/date[@name='sum'][.='1970-01-01T00:00:00Z']"
+ ,"//lst[@name='active_dt']/double[@name='sumOfSquares'][.='0.0']"
+ ,"//lst[@name='active_dt']/double[@name='stddev'][.='0.0']"
+
+ // if new stats are supported, this will break - update test to assert values for each
+ ,"count(//lst[@name='active_dt']/*)=8"
+ );
}
public void testStatsFacetMultivaluedErrorHandling() throws Exception {
@@ -813,8 +852,8 @@ public class StatsComponentTest extends
args.put(CommonParams.Q, "*:*");
args.put(StatsParams.STATS, "true");
args.put(StatsParams.STATS_FIELD, fieldName);
- args.put("indent", "true");
args.put(StatsParams.STATS_CALC_DISTINCT, "true");
+ args.put("indent", "true");
SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
@@ -860,8 +899,8 @@ public class StatsComponentTest extends
args.put(StatsParams.STATS, "true");
args.put(StatsParams.STATS_FIELD, fieldName);
args.put(StatsParams.STATS_FACET, fieldName);
- args.put("indent", "true");
args.put(StatsParams.STATS_CALC_DISTINCT, "true");
+ args.put("indent", "true");
SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
@@ -987,27 +1026,107 @@ public class StatsComponentTest extends
assertU(commit());
- Map<String, String> args = new HashMap<>();
- args.put(CommonParams.Q, "*:*");
- args.put(StatsParams.STATS, "true");
- args.put(StatsParams.STATS_FIELD, fieldName);
- args.put(StatsParams.STATS_CALC_DISTINCT, "true");
- args.put("indent", "true");
- SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args));
+ final SolrParams baseParams = params(CommonParams.Q, "*:*",
+ "indent", "true",
+ StatsParams.STATS, "true");
+
+ SolrQueryRequest req1 = req(baseParams,
+ StatsParams.STATS_CALC_DISTINCT, "true",
+ StatsParams.STATS_FIELD, fieldName);
+ SolrQueryRequest req2 = req(baseParams,
+ StatsParams.STATS_FIELD,
+ "{!min=true, max=true, count=true, sum=true, mean=true, stddev=true, sumOfSquares=true, missing=true, calcdistinct=true}" + fieldName);
+
+ for (SolrQueryRequest req : new SolrQueryRequest[] { req1, req2 }) {
+ assertQ("test status on docValues and multiValued: " + req.toString(), req
+ , "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']"
+ , "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']"
+ , "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']"
+ , "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']"
+ , "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']"
+ , "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']"
+ , "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']"
+ , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']"
+ , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']"
+ // always comes along with countDistinct
+ , "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/float)=9"
+ // if new default stats are added, this will break - update test to assert values for each
+ ,"count(//lst[@name='" + fieldName + "']/*)=10"
+ );
+ }
+ }
+
+ public void testEnumFieldTypeStatus() throws Exception {
+ clearIndex();
+
+ String fieldName = "severity";
+ assertU(adoc("id", "0", fieldName, "Not Available"));
+ assertU(adoc("id", "1", fieldName, "Not Available"));
+ assertU(adoc("id", "2", fieldName, "Not Available"));
+ assertU(adoc("id", "3", fieldName, "Not Available"));
+ assertU(adoc("id", "4", fieldName, "Not Available"));
+ assertU(adoc("id", "5", fieldName, "Low"));
+ assertU(adoc("id", "6", fieldName, "Low"));
+ assertU(adoc("id", "7", fieldName, "Low"));
+ assertU(adoc("id", "8", fieldName, "Low"));
+ assertU(adoc("id", "9", fieldName, "Medium"));
+ assertU(adoc("id", "10", fieldName, "Medium"));
+ assertU(adoc("id", "11", fieldName, "Medium"));
+ assertU(adoc("id", "12", fieldName, "High"));
+ assertU(adoc("id", "13", fieldName, "High"));
+ assertU(adoc("id", "14", fieldName, "Critical"));
+
+
+ for (int i = 20; i <= 30; i++) {
+ assertU(adoc("id", "" + i));
+ }
- assertQ("test min/max on docValues and multiValued", req
- , "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']"
- , "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']"
- , "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']"
- , "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']"
- , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']"
- , "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']"
- , "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']"
- , "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']"
- , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']");
+ assertU(commit());
+
+ assertQ("enum", req("q","*:*", "stats", "true", "stats.field", fieldName)
+ , "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']"
+ , "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']"
+ , "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']"
+ , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']");
+
+
+ assertQ("enum calcdistinct", req("q","*:*", "stats", "true", "stats.field", fieldName,
+ StatsParams.STATS_CALC_DISTINCT, "true")
+ , "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']"
+ , "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']"
+ , "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']"
+ , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='5']"
+ , "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/*)=5"
+ , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']");
+
+
+ final String pre = "//lst[@name='stats_fields']/lst[@name='"+fieldName+"']/lst[@name='facets']/lst[@name='severity']";
+ assertQ("enum + stats.facet", req("q","*:*", "stats", "true", "stats.field", fieldName,
+ "stats.facet", fieldName)
+ , pre + "/lst[@name='High']/str[@name='min'][.='High']"
+ , pre + "/lst[@name='High']/str[@name='max'][.='High']"
+ , pre + "/lst[@name='High']/long[@name='count'][.='2']"
+ , pre + "/lst[@name='High']/long[@name='missing'][.='0']"
+ , pre + "/lst[@name='Low']/str[@name='min'][.='Low']"
+ , pre + "/lst[@name='Low']/str[@name='max'][.='Low']"
+ , pre + "/lst[@name='Low']/long[@name='count'][.='4']"
+ , pre + "/lst[@name='Low']/long[@name='missing'][.='0']"
+ , pre + "/lst[@name='Medium']/str[@name='min'][.='Medium']"
+ , pre + "/lst[@name='Medium']/str[@name='max'][.='Medium']"
+ , pre + "/lst[@name='Medium']/long[@name='count'][.='3']"
+ , pre + "/lst[@name='Medium']/long[@name='missing'][.='0']"
+ , pre + "/lst[@name='Not Available']/str[@name='min'][.='Not Available']"
+ , pre + "/lst[@name='Not Available']/str[@name='max'][.='Not Available']"
+ , pre + "/lst[@name='Not Available']/long[@name='count'][.='5']"
+ , pre + "/lst[@name='Not Available']/long[@name='missing'][.='0']"
+ , pre + "/lst[@name='Critical']/str[@name='min'][.='Critical']"
+ , pre + "/lst[@name='Critical']/str[@name='max'][.='Critical']"
+ , pre + "/lst[@name='Critical']/long[@name='count'][.='1']"
+ , pre + "/lst[@name='Critical']/long[@name='missing'][.='0']"
+ );
}
-
+
private Doc createDocValuesDocument(List<FldType> types, String fieldName, String id, Comparable... values) throws Exception {
Doc doc = createDoc(types);
doc.getValues("id").set(0, id);
@@ -1020,30 +1139,418 @@ public class StatsComponentTest extends
return cat_docValues;
}
+ public void testIndividualStatLocalParams() throws Exception {
+ final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='k']/";
+
+ assertU(adoc("id", "1", "a_f", "2.3", "b_f", "9.7", "a_i", "9", "foo_t", "how now brown cow"));
+ assertU(commit());
+
+ AVLTreeDigest tdigest = new AVLTreeDigest(100);
+
+ // some quick sanity check assertions...
+ // trivial check that we only get the exact 2 we ask for
+ assertQ("ask for and get only 2 stats",
+ req("q","*:*", "stats", "true",
+ "stats.field", "{!key=k mean=true min=true}a_i")
+ , kpre + "double[@name='mean'][.='9.0']"
+ , kpre + "double[@name='min'][.='9.0']"
+ , "count(" + kpre + "*)=2"
+ );
+
+ // for stats that are true/false, sanity check false does its job
+ assertQ("min=true & max=false: only min should come back",
+ req("q","*:*", "stats", "true",
+ "stats.field", "{!key=k max=false min=true}a_i")
+ , kpre + "double[@name='min'][.='9.0']"
+ , "count(" + kpre + "*)=1"
+ );
+ assertQ("min=false: localparam stat means ignore default set, "+
+ "but since only local param is false no stats should be returned",
+ req("q","*:*", "stats", "true",
+ "stats.field", "{!key=k min=false}a_i")
+ // section of stats for this field should exist ...
+ , XPRE + "lst[@name='stats_fields']/lst[@name='k']"
+ // ...but be empty
+ , "count(" + kpre + "*)=0"
+ );
+
+ double sum = 0;
+ double sumOfSquares = 0;
+ final int count = 20;
+ for (int i = 0; i < count; i++) {
+ assertU(adoc("id", String.valueOf(i), "a_f", "2.3", "b_f", "9.7", "a_i",
+ String.valueOf(i % 10), "foo_t", "how now brown cow"));
+ tdigest.add(i % 10);
+ sum += i % 10;
+ sumOfSquares += (i % 10) * (i % 10);
+ }
+
+ assertU(commit());
+
+ ByteBuffer buf = ByteBuffer.allocate(tdigest.smallByteSize());
+ tdigest.asSmallBytes(buf);
+ EnumSet<Stat> allStats = EnumSet.allOf(Stat.class);
+
+ Map<Stat,String> expectedStats = new HashMap<>();
+ expectedStats.put(Stat.min, "0.0");
+ expectedStats.put(Stat.max, "9.0");
+ expectedStats.put(Stat.missing, "0");
+ expectedStats.put(Stat.sum, String.valueOf(sum));
+ expectedStats.put(Stat.count, String.valueOf(count));
+ expectedStats.put(Stat.mean, String.valueOf(sum / count));
+ expectedStats.put(Stat.sumOfSquares, String.valueOf(sumOfSquares));
+ expectedStats.put(Stat.stddev, String.valueOf(Math.sqrt(((count * sumOfSquares) - (sum * sum))/ (20 * (count - 1.0D)))));
+ expectedStats.put(Stat.calcdistinct, "10");
+ // NOTE: per shard expected value
+ expectedStats.put(Stat.percentiles, Base64.byteArrayToBase64(buf.array(), 0, buf.array().length));
+
+ Map<Stat,String> expectedType = new HashMap<>();
+ expectedType.put(Stat.min, "double");
+ expectedType.put(Stat.max, "double");
+ expectedType.put(Stat.missing, "long");
+ expectedType.put(Stat.sum, "double");
+ expectedType.put(Stat.count, "long");
+ expectedType.put(Stat.mean, "double");
+ expectedType.put(Stat.sumOfSquares, "double");
+ expectedType.put(Stat.stddev, "double");
+ expectedType.put(Stat.calcdistinct, "long");
+ expectedType.put(Stat.percentiles, "str");
+
+ Map<Stat,String> localParasInput = new HashMap<>();
+ localParasInput.put(Stat.min, "true");
+ localParasInput.put(Stat.max, "true");
+ localParasInput.put(Stat.missing, "true");
+ localParasInput.put(Stat.sum, "true");
+ localParasInput.put(Stat.count, "true");
+ localParasInput.put(Stat.mean, "true");
+ localParasInput.put(Stat.sumOfSquares, "true");
+ localParasInput.put(Stat.stddev, "true");
+ localParasInput.put(Stat.calcdistinct, "true");
+ localParasInput.put(Stat.percentiles, "'90, 99'");
+
+ // canary in the coal mine
+ assertEquals("size of expectedStats doesn't match all known stats; " +
+ "enum was updated w/o updating test?",
+ expectedStats.size(), allStats.size());
+ assertEquals("size of expectedType doesn't match all known stats; " +
+ "enum was updated w/o updating test?",
+ expectedType.size(), allStats.size());
+
+ // whitebox test: explicitly ask for isShard=true with an individual stat
+ for (Stat stat : expectedStats.keySet()) {
+ EnumSet<Stat> distribDeps = stat.getDistribDeps();
+
+ StringBuilder exclude = new StringBuilder();
+ List<String> testParas = new ArrayList<String>(distribDeps.size() + 2);
+ int calcdistinctFudge = 0;
+
+ for (Stat perShardStat : distribDeps ){
+ String key = perShardStat.toString();
+ if (perShardStat.equals(Stat.calcdistinct)) {
+ // this abomination breaks all the rules - uses a diff response key and triggers
+ // the additional "distinctValues" stat
+ key = "countDistinct";
+ calcdistinctFudge++;
+ testParas.add("count(" + kpre + "arr[@name='distinctValues']/*)=10");
+ }
+ testParas.add(kpre + expectedType.get(perShardStat) +
+ "[@name='" + key + "'][.='" + expectedStats.get(perShardStat) + "']");
+ // even if we go out of our way to exclude the dependent stats,
+ // the shard should return them since they are a dependency for the requested stat
+ if (!stat.equals(Stat.percentiles)){
+ exclude.append(perShardStat + "=false ");
+ }
+ }
+ testParas.add("count(" + kpre + "*)=" + (distribDeps.size() + calcdistinctFudge));
+
+ assertQ("ask for only "+stat+", with isShard=true, and expect only deps: " + distribDeps,
+ req("q", "*:*", "isShard", "true", "stats", "true",
+ "stats.field", "{!key=k " + exclude + stat +"=" + localParasInput.get(stat) + "}a_i")
+ , testParas.toArray(new String[testParas.size()])
+ );
+ }
+
+ // test all the possible combinations (of all possible sizes) of stats params
+ for (int numParams = 1; numParams <= allStats.size(); numParams++) {
+ for (EnumSet<Stat> set : new StatSetCombinations(numParams, allStats)) {
+
+ // EnumSets use natural ordering, we want to randomize the order of the params
+ List<Stat> combo = new ArrayList<Stat>(set);
+ Collections.shuffle(combo, random());
+
+ StringBuilder paras = new StringBuilder("{!key=k ");
+ List<String> testParas = new ArrayList<String>(numParams + 2);
+
+ int calcdistinctFudge = 0;
+ for (Stat stat : combo) {
+ String key = stat.toString();
+ if (stat.equals(Stat.calcdistinct)) {
+ // this abomination breaks all the rules - uses a diff response key and triggers
+ // the additional "distinctValues" stat
+ key = "countDistinct";
+ calcdistinctFudge++;
+ testParas.add("count(" + kpre + "arr[@name='distinctValues']/*)=10");
+ }
+ paras.append(stat + "=" + localParasInput.get(stat)+ " ");
+
+ if (!stat.equals(Stat.percentiles)){
+ testParas.add(kpre + expectedType.get(stat) + "[@name='" + key + "'][.='" + expectedStats.get(stat) + "']");
+ } else {
+ testParas.add("count(" + kpre + "lst[@name='percentiles']/*)=2");
+ String p90 = "" + tdigest.quantile(0.90D);
+ String p99 = "" + tdigest.quantile(0.99D);
+ testParas.add(kpre + "lst[@name='percentiles']/double[@name='90.0'][.="+p90+"]");
+ testParas.add(kpre + "lst[@name='percentiles']/double[@name='99.0'][.="+p99+"]");
+ }
+ }
+
+ paras.append("}a_i");
+ testParas.add("count(" + kpre + "*)=" + (combo.size() + calcdistinctFudge));
+
+ assertQ("ask for an get only: "+ combo,
+ req("q","*:*", "stats", "true",
+ "stats.field", paras.toString())
+ , testParas.toArray(new String[testParas.size()])
+ );
+ }
+ }
+ }
-// public void testOtherFacetStatsResult() throws Exception {
-//
-// assertU(adoc("id", "1", "stats_tls_dv", "10", "active_i", "1"));
-// assertU(adoc("id", "2", "stats_tls_dv", "20", "active_i", "1"));
-// assertU(commit());
-// assertU(adoc("id", "3", "stats_tls_dv", "30", "active_i", "2"));
-// assertU(adoc("id", "4", "stats_tls_dv", "40", "active_i", "2"));
-// assertU(commit());
-//
-// final String pre = "//lst[@name='stats_fields']/lst[@name='stats_tls_dv']/lst[@name='facets']/lst[@name='active_i']";
-//
-// assertQ("test value for active_s=true", req("q", "*:*", "stats", "true", "stats.field", "stats_tls_dv", "stats.facet", "active_i","indent", "true")
-// , "*[count("+pre+")=1]"
-// , pre+"/lst[@name='1']/double[@name='min'][.='10.0']"
-// , pre+"/lst[@name='1']/double[@name='max'][.='20.0']"
-// , pre+"/lst[@name='1']/double[@name='sum'][.='30.0']"
-// , pre+"/lst[@name='1']/long[@name='count'][.='2']"
-// , pre+"/lst[@name='1']/long[@name='missing'][.='0']"
-// , pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']"
-// , "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2"
-// , pre+"/lst[@name='1']/double[@name='sumOfSquares'][.='500.0']"
-// , pre+"/lst[@name='1']/double[@name='mean'][.='15.0']"
-// , pre+"/lst[@name='1']/double[@name='stddev'][.='7.0710678118654755']"
-// );
-// }
+ // Test for SOLR-6349
+ public void testCalcDistinctStats() throws Exception {
+ final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='k']/";
+ final String min = "count(" + kpre +"/double[@name='min'])";
+ final String countDistinct = "count(" + kpre +"/long[@name='countDistinct'])";
+ final String distinctValues = "count(" + kpre +"/arr[@name='distinctValues'])";
+
+ final int count = 20;
+ for (int i = 0; i < count; i++) {
+ assertU(adoc("id", String.valueOf(i), "a_f", "2.3", "b_f", "9.7", "a_i",
+ String.valueOf(i % 10), "foo_t", "how now brown cow"));
+ }
+
+ assertU(commit());
+
+ String[] baseParams = new String[] { "q", "*:*", "stats", "true","indent", "true" };
+
+ for (SolrParams p : new SolrParams[] {
+ params("stats.field", "{!key=k}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k}a_i"),
+ params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "true",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k}a_i"),
+ params("stats.field", "{!key=k min='true'}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "true",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true",
+ "stats.field", "{!key=k min='true' calcdistinct='false'}a_i"),
+ }) {
+
+ assertQ("min is either default or explicitly requested; "+
+ "countDistinct & distinctValues either default or explicitly prevented"
+ , req(p, baseParams)
+ , min + "=1"
+ , countDistinct + "=0"
+ , distinctValues + "=0");
+ }
+
+ for (SolrParams p : new SolrParams[] {
+ params("stats.calcdistinct", "true",
+ "stats.field", "{!key=k}a_i"),
+ params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true",
+ "stats.field", "{!key=k}a_i"),
+ params("stats.calcdistinct", "false",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true",
+ "stats.field", "{!key=k}a_i"),
+ params("stats.calcdistinct", "false ",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ params("stats.calcdistinct", "false ",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ }) {
+
+ assertQ("min is either default or explicitly requested; " +
+ "countDistinct & distinctValues explicitly requested"
+ , req(p, baseParams)
+ , min + "=1"
+ , countDistinct + "=1"
+ , distinctValues + "=1");
+ }
+
+ for (SolrParams p : new SolrParams[] {
+ params("stats.field", "{!key=k calcdistinct=true}a_i"),
+
+ params("stats.calcdistinct", "true",
+ "stats.field", "{!key=k min='false'}a_i"),
+
+ params("stats.calcdistinct", "true",
+ "stats.field", "{!key=k max='true' min='false'}a_i"),
+
+ params("stats.calcdistinct", "false",
+ "stats.field", "{!key=k calcdistinct=true}a_i"),
+ params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k calcdistinct=true}a_i"),
+ params("stats.calcdistinct", "false",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k calcdistinct=true}a_i"),
+ params("stats.calcdistinct", "false",
+ "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k min='false' calcdistinct=true}a_i"),
+ }) {
+
+ assertQ("min is explicitly excluded; " +
+ "countDistinct & distinctValues explicitly requested"
+ , req(p, baseParams)
+ , min + "=0"
+ , countDistinct + "=1"
+ , distinctValues + "=1");
+ }
+
+ for (SolrParams p : new SolrParams[] {
+ params(StatsParams.STATS_CALC_DISTINCT, "true",
+ "stats.field", "{!key=k min=true}a_i"),
+ params("f.a_i.stats.calcdistinct", "true",
+ "stats.field", "{!key=k min=true}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "false",
+ "f.a_i.stats.calcdistinct", "true",
+ "stats.field", "{!key=k min=true}a_i"),
+ params("f.a_i.stats.calcdistinct", "false",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "false",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ params(StatsParams.STATS_CALC_DISTINCT, "false",
+ "f.a_i.stats.calcdistinct", "false",
+ "stats.field", "{!key=k min=true calcdistinct=true}a_i"),
+ }) {
+
+ assertQ("min is explicitly requested; " +
+ "countDistinct & distinctValues explicitly requested"
+ , req(p, baseParams)
+ , min + "=1"
+ , countDistinct + "=1"
+ , distinctValues + "=1");
+ }
+ }
+
+ // simple percentiles test
+ public void testPercentiles() throws Exception {
+
+ // NOTE: deliberately not in numeric order
+ String percentiles = "10.0,99.9,1.0,2.0,20.0,30.0,40.0,50.0,60.0,70.0,80.0,98.0,99.0";
+ List <String> percentilesList = StrUtils.splitSmart(percentiles, ',');
+
+ // test empty case
+ SolrQueryRequest query = req("q", "*:*", "stats", "true",
+ "stats.field", "{!percentiles='" + percentiles + "'}stat_f");
+ try {
+ SolrQueryResponse rsp = h.queryAndResponse(null, query);
+ NamedList<Double> pout = extractPercentils(rsp, "stat_f");
+ for (int i = 0; i < percentilesList.size(); i++) {
+ // ensure exact order, but all values should be null (empty result set)
+ assertEquals(percentilesList.get(i), pout.getName(i));
+ assertEquals(null, pout.getVal(i));
+ }
+ } finally {
+ query.close();
+ }
+
+ int id = 0;
+ // add trivial docs to test basic percentiles
+ for (int i = 0; i < 100; i++) {
+ // add the same values multiple times (diff docs)
+ for (int j =0; j < 5; j++) {
+ assertU(adoc("id", ++id+"", "stat_f", ""+i));
+ }
+ }
+
+ assertU(commit());
+
+ query = req("q", "*:*", "stats", "true",
+ "stats.field", "{!percentiles='" + percentiles + "'}stat_f");
+ try {
+ SolrQueryResponse rsp = h.queryAndResponse(null, query);
+ NamedList<Double> pout = extractPercentils(rsp, "stat_f");
+ for (int i = 0; i < percentilesList.size(); i++) {
+ String p = percentilesList.get(i);
+ assertEquals(p, pout.getName(i));
+ assertEquals(Double.parseDouble(p), pout.getVal(i), 1.0D);
+
+ }
+ } finally {
+ query.close();
+ }
+
+ // test request for no percentiles
+ query = req("q", "*:*", "stats", "true",
+ "stats.field", "{!percentiles=''}stat_f");
+ try {
+ SolrQueryResponse rsp = h.queryAndResponse(null, query);
+ NamedList<Double> pout = extractPercentils(rsp, "stat_f");
+ assertNull(pout);
+ } finally {
+ query.close();
+ }
+
+ // non-numeric types don't support percentiles
+ assertU(adoc("id", ++id+"", "stat_dt", "1999-05-03T04:55:01Z"));
+ assertU(adoc("id", ++id+"", "stat_s", "cow"));
+
+ assertU(commit());
+
+ query = req("q", "*:*", "stats", "true",
+ "stats.field", "{!percentiles='" + percentiles + "'}stat_dt",
+ "stats.field", "{!percentiles='" + percentiles + "'}stat_s");
+
+ try {
+ SolrQueryResponse rsp = h.queryAndResponse(null, query);
+ assertNull(extractPercentils(rsp, "stat_dt"));
+ assertNull(extractPercentils(rsp, "stat_s"));
+ } finally {
+ query.close();
+ }
+
+ }
+
+ private NamedList<Double> extractPercentils(SolrQueryResponse rsp, String key) {
+ return ((NamedList<NamedList<NamedList<NamedList<Double>>>> )
+ rsp.getValues().get("stats")).get("stats_fields").get(key).get("percentiles");
+ }
+
+ /**
+ * given a comboSize and an EnumSet of Stats, generates iterators that produce every possible
+ * enum combination of that size
+ */
+ public static final class StatSetCombinations implements Iterable<EnumSet<Stat>> {
+ // we need an array so we can do fixed index offset lookups
+ private final Stat[] all;
+ private final Combinations intCombos;
+ public StatSetCombinations(int comboSize, EnumSet<Stat> universe) {
+ // NOTE: should not need to sort, EnumSet uses natural ordering
+ all = universe.toArray(new Stat[universe.size()]);
+ intCombos = new Combinations(all.length, comboSize);
+ }
+ public Iterator<EnumSet<Stat>> iterator() {
+ return new Iterator<EnumSet<Stat>>() {
+ final Iterator<int[]> wrapped = intCombos.iterator();
+ public void remove() {
+ wrapped.remove();
+ }
+ public boolean hasNext() {
+ return wrapped.hasNext();
+ }
+ public EnumSet<Stat> next() {
+ EnumSet<Stat> result = EnumSet.noneOf(Stat.class);
+ int[] indexes = wrapped.next();
+ for (int i = 0; i < indexes.length; i++) {
+ result.add(all[indexes[i]]);
+ }
+ return result;
+ }
+ };
+ }
+ }
}
Modified: lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java?rev=1670257&r1=1670256&r2=1670257&view=diff
==============================================================================
--- lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java (original)
+++ lucene/dev/branches/lucene6271/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java Tue Mar 31 05:22:40 2015
@@ -1,19 +1,13 @@
package org.apache.solr.handler.component;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.TermVectorParams;
-import org.apache.solr.request.LocalSolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
import org.junit.BeforeClass;
import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -42,6 +36,7 @@ public class TermVectorComponentTest ext
initCore("solrconfig.xml","schema.xml");
assertU(adoc("id", "0",
+ "test_posoffpaytv", "This is a title and another title",
"test_posofftv", "This is a title and another title",
"test_basictv", "This is a title and another title",
"test_notv", "This is a title and another title",
@@ -49,6 +44,7 @@ public class TermVectorComponentTest ext
"test_offtv", "This is a title and another title"
));
assertU(adoc("id", "1",
+ "test_posoffpaytv", "The quick reb fox jumped over the lazy brown dogs.",
"test_posofftv", "The quick reb fox jumped over the lazy brown dogs.",
"test_basictv", "The quick reb fox jumped over the lazy brown dogs.",
"test_notv", "The quick reb fox jumped over the lazy brown dogs.",
@@ -56,6 +52,7 @@ public class TermVectorComponentTest ext
"test_offtv", "The quick reb fox jumped over the lazy brown dogs."
));
assertU(adoc("id", "2",
+ "test_posoffpaytv", "This is a document",
"test_posofftv", "This is a document",
"test_basictv", "This is a document",
"test_notv", "This is a document",
@@ -63,6 +60,7 @@ public class TermVectorComponentTest ext
"test_offtv", "This is a document"
));
assertU(adoc("id", "3",
+ "test_posoffpaytv", "another document",
"test_posofftv", "another document",
"test_basictv", "another document",
"test_notv", "another document",
@@ -71,6 +69,7 @@ public class TermVectorComponentTest ext
));
//bunch of docs that are variants on blue
assertU(adoc("id", "4",
+ "test_posoffpaytv", "blue",
"test_posofftv", "blue",
"test_basictv", "blue",
"test_notv", "blue",
@@ -78,6 +77,7 @@ public class TermVectorComponentTest ext
"test_offtv", "blue"
));
assertU(adoc("id", "5",
+ "test_posoffpaytv", "blud",
"test_posofftv", "blud",
"test_basictv", "blud",
"test_notv", "blud",
@@ -85,6 +85,7 @@ public class TermVectorComponentTest ext
"test_offtv", "blud"
));
assertU(adoc("id", "6",
+ "test_posoffpaytv", "boue",
"test_posofftv", "boue",
"test_basictv", "boue",
"test_notv", "boue",
@@ -92,6 +93,7 @@ public class TermVectorComponentTest ext
"test_offtv", "boue"
));
assertU(adoc("id", "7",
+ "test_posoffpaytv", "glue",
"test_posofftv", "glue",
"test_basictv", "glue",
"test_notv", "glue",
@@ -99,6 +101,7 @@ public class TermVectorComponentTest ext
"test_offtv", "glue"
));
assertU(adoc("id", "8",
+ "test_posoffpaytv", "blee",
"test_posofftv", "blee",
"test_basictv", "blee",
"test_notv", "blee",
@@ -106,6 +109,7 @@ public class TermVectorComponentTest ext
"test_offtv", "blee"
));
assertU(adoc("id", "9",
+ "test_posoffpaytv", "blah",
"test_posofftv", "blah",
"test_basictv", "blah",
"test_notv", "blah",
@@ -125,6 +129,7 @@ public class TermVectorComponentTest ext
" 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
+ " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
" 'uniqueKeyFieldName':'id'}"
);
@@ -166,6 +171,7 @@ public class TermVectorComponentTest ext
" 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
+ " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," +
" 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}," +
" 'uniqueKeyFieldName':'id'}"
);
@@ -241,7 +247,8 @@ public class TermVectorComponentTest ext
public void testPerField() throws Exception {
assertJQ(req("json.nl","map", "qt",tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true"
,TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true"
- ,TermVectorParams.FIELDS, "test_basictv,test_notv,test_postv,test_offtv,test_posofftv"
+ ,TermVectorParams.FIELDS, "test_basictv,test_notv,test_postv,test_offtv,test_posofftv,test_posoffpaytv"
+ ,"f.test_posoffpaytv." + TermVectorParams.PAYLOADS, "false"
,"f.test_posofftv." + TermVectorParams.POSITIONS, "false"
,"f.test_offtv." + TermVectorParams.OFFSETS, "false"
,"f.test_basictv." + TermVectorParams.DF, "false"
@@ -255,6 +262,17 @@ public class TermVectorComponentTest ext
);
}
+ @Test
+ public void testPayloads() throws Exception {
+ // This field uses TokenOffsetPayloadTokenFilter, which
+ // stuffs start (20) and end offset (27) into the
+ // payload:
+ // The expected 'payloads' value is the base64 encoding of those two
+ // big-endian ints (0x14 = 20, 0x1b = 27) => 'AAAAFAAAABs='.
+ assertJQ(req("json.nl","map", "qt",tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true"
+ , TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true",
+ TermVectorParams.PAYLOADS, "true")
+ ,"/termVectors/0/test_posoffpaytv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'payloads':{'payload': 'AAAAFAAAABs='}, 'df':2, 'tf-idf':0.5}"
+ );
+ }
}