You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by kr...@apache.org on 2016/11/14 16:06:42 UTC
[01/16] lucene-solr:jira/solr-8593: LUCENE-7555: use BM25Similarity
for this test
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-8593 368204bf4 -> 99a674601
LUCENE-7555: use BM25Similarity for this test
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2bc1d276
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2bc1d276
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2bc1d276
Branch: refs/heads/jira/solr-8593
Commit: 2bc1d2761f593b2ceea19b3195e0cb430318ceaa
Parents: 4d94510
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Nov 12 13:28:57 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Nov 12 13:28:57 2016 -0500
----------------------------------------------------------------------
.../src/test/org/apache/solr/uninverting/TestFieldCacheSort.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2bc1d276/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
index d53f610..4755c8f 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
@@ -50,10 +50,11 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
-import org.apache.solr.uninverting.UninvertingReader.Type;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
+import org.apache.solr.uninverting.UninvertingReader.Type;
/*
* Tests sorting (but with fieldcache instead of docvalues)
@@ -434,6 +435,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
writer.close();
IndexSearcher searcher = newSearcher(ir);
+ searcher.setSimilarity(new BM25Similarity());
Sort sort = new Sort(new SortField(null, SortField.Type.SCORE, true));
TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), 10, sort);
[03/16] lucene-solr:jira/solr-8593: LUCENE-7554: use BM25Similarity
for this test
Posted by kr...@apache.org.
LUCENE-7554: use BM25Similarity for this test
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/672d7e01
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/672d7e01
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/672d7e01
Branch: refs/heads/jira/solr-8593
Commit: 672d7e013e9b4e7ab803cb68360d83654dcafe96
Parents: 7523d58
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Nov 12 13:32:48 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Nov 12 13:32:48 2016 -0500
----------------------------------------------------------------------
.../lucene/search/vectorhighlight/FastVectorHighlighterTest.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/672d7e01/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
index 89f46af..2658fe2 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
@@ -51,6 +51,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.search.highlight.Encoder;
+import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
@@ -349,6 +350,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
FastVectorHighlighter highlighter = new FastVectorHighlighter();
IndexReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = newSearcher(reader);
+ searcher.setSimilarity(new BM25Similarity());
TopDocs hits = searcher.search(query, 10);
assertEquals(2, hits.totalHits);
FieldQuery fieldQuery = highlighter.getFieldQuery(query, reader);
[09/16] lucene-solr:jira/solr-8593: don't use BooleanSimilarity in
RandomSimilarity
Posted by kr...@apache.org.
don't use BooleanSimilarity in RandomSimilarity
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c0f9bdf3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c0f9bdf3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c0f9bdf3
Branch: refs/heads/jira/solr-8593
Commit: c0f9bdf3c3c7721dced3a742ba2a97bdc4db360f
Parents: 4a47c57
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 18:41:18 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 18:41:18 2016 -0500
----------------------------------------------------------------------
.../org/apache/lucene/search/similarities/RandomSimilarity.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c0f9bdf3/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java b/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
index 43b6c3c..4bfe4b8 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
@@ -91,7 +91,8 @@ public class RandomSimilarity extends PerFieldSimilarityWrapper {
allSims = new ArrayList<>();
allSims.add(new ClassicSimilarity());
allSims.add(new BM25Similarity());
- allSims.add(new BooleanSimilarity());
+ // We cannot do this, because this similarity behaves in "non-traditional" ways:
+ // allSims.add(new BooleanSimilarity());
for (BasicModel basicModel : BASIC_MODELS) {
for (AfterEffect afterEffect : AFTER_EFFECTS) {
for (Normalization normalization : NORMALIZATIONS) {
[13/16] lucene-solr:jira/solr-8593: LUCENE-6664: add getter
Posted by kr...@apache.org.
LUCENE-6664: add getter
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a86f8076
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a86f8076
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a86f8076
Branch: refs/heads/jira/solr-8593
Commit: a86f807685403537c20aa697b7c7e06bd97cbdf9
Parents: 907bed8
Author: Mike McCandless <mi...@apache.org>
Authored: Mon Nov 14 06:33:11 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Mon Nov 14 06:33:11 2016 -0500
----------------------------------------------------------------------
.../core/src/java/org/apache/lucene/util/RollingBuffer.java | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a86f8076/lucene/core/src/java/org/apache/lucene/util/RollingBuffer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/RollingBuffer.java b/lucene/core/src/java/org/apache/lucene/util/RollingBuffer.java
index aad95b6..88d2361 100644
--- a/lucene/core/src/java/org/apache/lucene/util/RollingBuffer.java
+++ b/lucene/core/src/java/org/apache/lucene/util/RollingBuffer.java
@@ -102,7 +102,7 @@ public abstract class RollingBuffer<T extends RollingBuffer.Resettable> {
nextPos++;
count++;
}
- assert inBounds(pos);
+ assert inBounds(pos): "pos=" + pos + " nextPos=" + nextPos + " count=" + count;
final int index = getIndex(pos);
//System.out.println(" pos=" + pos + " nextPos=" + nextPos + " -> index=" + index);
//assert buffer[index].pos == pos;
@@ -110,11 +110,16 @@ public abstract class RollingBuffer<T extends RollingBuffer.Resettable> {
}
/** Returns the maximum position looked up, or -1 if no
- * position has been looked up sinc reset/init. */
+ * position has been looked up since reset/init. */
public int getMaxPos() {
return nextPos-1;
}
+ /** Returns how many active positions are in the buffer. */
+ public int getBufferSize() {
+ return count;
+ }
+
public void freeBefore(int pos) {
final int toFree = count - (nextPos - pos);
assert toFree >= 0;
[14/16] lucene-solr:jira/solr-8593: Fix arg order in
CommonParamsTest's assertEquals.
Posted by kr...@apache.org.
Fix arg order in CommonParamsTest's assertEquals.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fba2a864
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fba2a864
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fba2a864
Branch: refs/heads/jira/solr-8593
Commit: fba2a864d4034b541e0e02c25ca77ae393d97bc7
Parents: a86f807
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Nov 14 12:31:38 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Nov 14 12:31:38 2016 +0000
----------------------------------------------------------------------
.../test/org/apache/solr/common/params/CommonParamsTest.java | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fba2a864/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
index c79c797..73643d7 100755
--- a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
@@ -26,9 +26,9 @@ import org.apache.lucene.util.LuceneTestCase;
*/
public class CommonParamsTest extends LuceneTestCase
{
- public void testStart() { assertEquals(CommonParams.START, "start"); }
- public void testStartDefault() { assertEquals(CommonParams.START_DEFAULT, 0); }
+ public void testStart() { assertEquals("start", CommonParams.START); }
+ public void testStartDefault() { assertEquals(0, CommonParams.START_DEFAULT); }
- public void testRows() { assertEquals(CommonParams.ROWS, "rows"); }
- public void testRowsDefault() { assertEquals(CommonParams.ROWS_DEFAULT, 10); }
+ public void testRows() { assertEquals("rows", CommonParams.ROWS); }
+ public void testRowsDefault() { assertEquals(10, CommonParams.ROWS_DEFAULT); }
}
[05/16] lucene-solr:jira/solr-8593: Revert the last change so we
continue using randomized similarity for this test.
Posted by kr...@apache.org.
Revert the last change so we continue using randomized similarity for this test.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8104468d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8104468d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8104468d
Branch: refs/heads/jira/solr-8593
Commit: 8104468de4c6a5a8cda7860c26313e79306d8c91
Parents: 8fa08f6
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 17:13:06 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 17:13:06 2016 -0500
----------------------------------------------------------------------
lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8104468d/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
index 1621f31..ecc4645 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
@@ -21,11 +21,10 @@ import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.*;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
-import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -58,7 +57,6 @@ public class TestDocBoost extends LuceneTestCase {
final float[] scores = new float[4];
IndexSearcher searcher = newSearcher(reader);
- searcher.setSimilarity(new BM25Similarity());
searcher.search
(new TermQuery(new Term("field", "word")),
new SimpleCollector() {
[16/16] lucene-solr:jira/solr-8593: Merge branch
'apache-https-master' into jira/solr-8593
Posted by kr...@apache.org.
Merge branch 'apache-https-master' into jira/solr-8593
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/99a67460
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/99a67460
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/99a67460
Branch: refs/heads/jira/solr-8593
Commit: 99a6746011e98ab0e7965e146956bc215e3d42f5
Parents: 368204b 4a31b29
Author: Kevin Risden <kr...@apache.org>
Authored: Mon Nov 14 10:06:29 2016 -0600
Committer: Kevin Risden <kr...@apache.org>
Committed: Mon Nov 14 10:06:29 2016 -0600
----------------------------------------------------------------------
.../org/apache/lucene/util/RollingBuffer.java | 9 +-
.../search/grouping/GroupingSearchTest.java | 10 +-
.../lucene/search/grouping/TestGrouping.java | 2 -
.../search/similarities/RandomSimilarity.java | 3 +-
solr/CHANGES.txt | 7 +
.../org/apache/solr/handler/ExportWriter.java | 10 +-
.../solrj/io/stream/StreamExpressionTest.java | 2 +-
.../client/solrj/io/stream/StreamingTest.java | 869 ++++++++++++-------
.../solr/common/params/CommonParamsTest.java | 8 +-
9 files changed, 587 insertions(+), 333 deletions(-)
----------------------------------------------------------------------
[02/16] lucene-solr:jira/solr-8593: LUCENE-7556: use BM25Similarity
for this test
Posted by kr...@apache.org.
LUCENE-7556: use BM25Similarity for this test
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7523d580
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7523d580
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7523d580
Branch: refs/heads/jira/solr-8593
Commit: 7523d580ed7f43d4664942fbdb49be2de138adb2
Parents: 2bc1d27
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Nov 12 13:30:50 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Nov 12 13:31:14 2016 -0500
----------------------------------------------------------------------
lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523d580/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
index ecc4645..1621f31 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
@@ -21,10 +21,11 @@ import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.*;
-import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -57,6 +58,7 @@ public class TestDocBoost extends LuceneTestCase {
final float[] scores = new float[4];
IndexSearcher searcher = newSearcher(reader);
+ searcher.setSimilarity(new BM25Similarity());
searcher.search
(new TermQuery(new Term("field", "word")),
new SimpleCollector() {
[08/16] lucene-solr:jira/solr-8593: Revert the last change so we
continue using randomized similarity for this test.
Posted by kr...@apache.org.
Revert the last change so we continue using randomized similarity for this test.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4a47c572
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4a47c572
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4a47c572
Branch: refs/heads/jira/solr-8593
Commit: 4a47c5720d9473e15c06fe2ee4d324188d5ac59a
Parents: 4619727
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 17:15:13 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 17:15:13 2016 -0500
----------------------------------------------------------------------
.../apache/lucene/search/grouping/GroupingSearchTest.java | 10 ++++------
1 file changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a47c572/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java
index 1a1e8af..d13bfd7 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java
@@ -16,10 +16,6 @@
*/
package org.apache.lucene.search.grouping;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -35,12 +31,15 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.mutable.MutableValueStr;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
public class GroupingSearchTest extends LuceneTestCase {
// Tests some very basic usages...
@@ -116,7 +115,6 @@ public class GroupingSearchTest extends LuceneTestCase {
w.addDocument(doc);
IndexSearcher indexSearcher = newSearcher(w.getReader());
- indexSearcher.setSimilarity(new BM25Similarity());
w.close();
Sort groupSort = Sort.RELEVANCE;
[11/16] lucene-solr:jira/solr-8593: Merge remote-tracking branch
'origin/master'
Posted by kr...@apache.org.
Merge remote-tracking branch 'origin/master'
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7936f74f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7936f74f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7936f74f
Branch: refs/heads/jira/solr-8593
Commit: 7936f74ff2c550eb41ccae04a17b1c55ff31f4d0
Parents: b359636 c0f9bdf
Author: Noble Paul <no...@apache.org>
Authored: Mon Nov 14 12:34:03 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Nov 14 12:34:03 2016 +0530
----------------------------------------------------------------------
.../org/apache/lucene/search/similarities/RandomSimilarity.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
[04/16] lucene-solr:jira/solr-8593: Revert the last change so we
continue using randomized similarity for this test.
Posted by kr...@apache.org.
Revert the last change so we continue using randomized similarity for this test.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8fa08f6d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8fa08f6d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8fa08f6d
Branch: refs/heads/jira/solr-8593
Commit: 8fa08f6d32ebd625998993050e7e33c147111232
Parents: 672d7e0
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 17:12:07 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 17:12:07 2016 -0500
----------------------------------------------------------------------
.../lucene/search/vectorhighlight/FastVectorHighlighterTest.java | 2 --
1 file changed, 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8fa08f6d/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
index 2658fe2..89f46af 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java
@@ -51,7 +51,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.search.highlight.Encoder;
-import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
@@ -350,7 +349,6 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
FastVectorHighlighter highlighter = new FastVectorHighlighter();
IndexReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = newSearcher(reader);
- searcher.setSimilarity(new BM25Similarity());
TopDocs hits = searcher.search(query, 10);
assertEquals(2, hits.totalHits);
FieldQuery fieldQuery = highlighter.getFieldQuery(query, reader);
[07/16] lucene-solr:jira/solr-8593: Revert the last change so we
continue using randomized similarity for this test.
Posted by kr...@apache.org.
Revert the last change so we continue using randomized similarity for this test.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4619727f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4619727f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4619727f
Branch: refs/heads/jira/solr-8593
Commit: 4619727f8ad7361731bfc8df0b6cf8c0f47a9d34
Parents: c11d6f9
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 17:14:21 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 17:14:21 2016 -0500
----------------------------------------------------------------------
.../src/test/org/apache/lucene/search/grouping/TestGrouping.java | 2 --
1 file changed, 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4619727f/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
index 8b0b4e8..6e2422c 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
@@ -64,7 +64,6 @@ import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingColl
import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
-import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
@@ -141,7 +140,6 @@ public class TestGrouping extends LuceneTestCase {
w.addDocument(doc);
IndexSearcher indexSearcher = newSearcher(w.getReader());
- indexSearcher.setSimilarity(new BM25Similarity());
w.close();
final Sort groupSort = Sort.RELEVANCE;
[06/16] lucene-solr:jira/solr-8593: Revert the last change so we
continue using randomized similarity for this test.
Posted by kr...@apache.org.
Revert the last change so we continue using randomized similarity for this test.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c11d6f9f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c11d6f9f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c11d6f9f
Branch: refs/heads/jira/solr-8593
Commit: c11d6f9f90606e7b61adf23d81e696827e4736b1
Parents: 8104468
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Nov 13 17:13:40 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Nov 13 17:13:40 2016 -0500
----------------------------------------------------------------------
.../src/test/org/apache/solr/uninverting/TestFieldCacheSort.java | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c11d6f9f/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
index 4755c8f..d53f610 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java
@@ -50,11 +50,10 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
+import org.apache.solr.uninverting.UninvertingReader.Type;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
-import org.apache.solr.uninverting.UninvertingReader.Type;
/*
* Tests sorting (but with fieldcache instead of docvalues)
@@ -435,7 +434,6 @@ public class TestFieldCacheSort extends LuceneTestCase {
writer.close();
IndexSearcher searcher = newSearcher(ir);
- searcher.setSimilarity(new BM25Similarity());
Sort sort = new Sort(new SortField(null, SortField.Type.SCORE, true));
TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), 10, sort);
[10/16] lucene-solr:jira/solr-8593: SOLR-9718: replace assert and
assertTrue() with assertEquals()
Posted by kr...@apache.org.
SOLR-9718: replace assert and assertTrue() with assertEquals()
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b359636f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b359636f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b359636f
Branch: refs/heads/jira/solr-8593
Commit: b359636fd9f272f3617a97a276fa41f7dd252016
Parents: a209c4e
Author: Noble Paul <no...@apache.org>
Authored: Mon Nov 14 12:33:37 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Nov 14 12:33:37 2016 +0530
----------------------------------------------------------------------
.../client/solrj/io/stream/StreamingTest.java | 609 ++++++++++---------
1 file changed, 305 insertions(+), 304 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b359636f/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 0da6750..3888a41 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -183,13 +183,13 @@ public class StreamingTest extends SolrCloudTestCase {
ParallelStream pstream = parallelStream(ustream, new FieldComparator("a_f", ComparatorOrder.ASCENDING));
attachStreamFactory(pstream);
List<Tuple> tuples = getTuples(pstream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0, 1, 3, 4, 6);
//Test the eofTuples
Map<String,Tuple> eofTuples = pstream.getEofTuples();
- assert(eofTuples.size() == numWorkers); //There should be an EOF tuple for each worker.
+ assertEquals(numWorkers, eofTuples.size()); //There should be an EOF tuple for each worker.
}
@@ -214,8 +214,8 @@ public class StreamingTest extends SolrCloudTestCase {
"sort", "a_i asc", "fq", "a_ss:hello0", "fq", "a_ss:hello1"));
CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params);
List<Tuple> tuples = getTuples(stream);
- assertEquals("Multiple fq clauses should have been honored", tuples.size(), 1);
- assertEquals("should only have gotten back document 0", tuples.get(0).getString("id"), "0");
+ assertEquals("Multiple fq clauses should have been honored", 1, tuples.size());
+ assertEquals("should only have gotten back document 0", "0", tuples.get(0).getString("id"));
}
@Test
@@ -235,7 +235,7 @@ public class StreamingTest extends SolrCloudTestCase {
RankStream rstream = new RankStream(stream, 3, new FieldComparator("a_i",ComparatorOrder.DESCENDING));
List<Tuple> tuples = getTuples(rstream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
assertOrder(tuples, 4,3,2);
}
@@ -263,7 +263,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
List<Tuple> tuples = getTuples(pstream);
- assert(tuples.size() == 10);
+ assertEquals(10, tuples.size());
assertOrder(tuples, 10,9,8,7,6,5,4,3,2,0);
}
@@ -289,10 +289,10 @@ public class StreamingTest extends SolrCloudTestCase {
CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA);
stream.setTrace(true);
List<Tuple> tuples = getTuples(stream);
- assert(tuples.get(0).get("_COLLECTION_").equals(COLLECTION));
- assert(tuples.get(1).get("_COLLECTION_").equals(COLLECTION));
- assert(tuples.get(2).get("_COLLECTION_").equals(COLLECTION));
- assert(tuples.get(3).get("_COLLECTION_").equals(COLLECTION));
+ assertEquals(COLLECTION, tuples.get(0).get("_COLLECTION_"));
+ assertEquals(COLLECTION, tuples.get(1).get("_COLLECTION_"));
+ assertEquals(COLLECTION, tuples.get(2).get("_COLLECTION_"));
+ assertEquals(COLLECTION, tuples.get(3).get("_COLLECTION_"));
}
@Test
@@ -320,7 +320,7 @@ public class StreamingTest extends SolrCloudTestCase {
List<Tuple> tuples = getTuples(rstream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
Tuple t0 = tuples.get(0);
List<Map> maps0 = t0.getMaps("group");
@@ -343,7 +343,7 @@ public class StreamingTest extends SolrCloudTestCase {
tuples = getTuples(rstream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
t0 = tuples.get(0);
maps0 = t0.getMaps("group");
@@ -385,7 +385,7 @@ public class StreamingTest extends SolrCloudTestCase {
List<Tuple> tuples = getTuples(rstream);
- assert(tuples.size() == 0);
+ assertEquals(0, tuples.size());
}
@@ -415,7 +415,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
List<Tuple> tuples = getTuples(pstream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
Tuple t0 = tuples.get(0);
List<Map> maps0 = t0.getMaps("group");
@@ -441,7 +441,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
tuples = getTuples(pstream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
t0 = tuples.get(0);
maps0 = t0.getMaps("group");
@@ -479,19 +479,19 @@ public class StreamingTest extends SolrCloudTestCase {
CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA);
ExceptionStream estream = new ExceptionStream(stream);
Tuple t = getTuple(estream);
- assert(t.EOF);
- assert(t.EXCEPTION);
- assert(t.getException().contains("sort param field can't be found: blah"));
+ assertTrue(t.EOF);
+ assertTrue(t.EXCEPTION);
+ assertTrue(t.getException().contains("sort param field can't be found: blah"));
//Test an error that comes originates from the /export handler
sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export");
stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA);
estream = new ExceptionStream(stream);
t = getTuple(estream);
- assert(t.EOF);
- assert(t.EXCEPTION);
+ assertTrue(t.EOF);
+ assertTrue(t.EXCEPTION);
//The /export handler will pass through a real exception.
- assert(t.getException().contains("undefined field:"));
+ assertTrue(t.getException().contains("undefined field:"));
}
@Test
@@ -516,10 +516,10 @@ public class StreamingTest extends SolrCloudTestCase {
ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("blah", ComparatorOrder.ASCENDING));
ExceptionStream estream = new ExceptionStream(pstream);
Tuple t = getTuple(estream);
- assert(t.EOF);
- assert(t.EXCEPTION);
+ assertTrue(t.EOF);
+ assertTrue(t.EXCEPTION);
//ParallelStream requires that partitionKeys be set.
- assert(t.getException().contains("When numWorkers > 1 partitionKeys must be set."));
+ assertTrue(t.getException().contains("When numWorkers > 1 partitionKeys must be set."));
//Test an error that originates from the /select handler
@@ -528,9 +528,9 @@ public class StreamingTest extends SolrCloudTestCase {
pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("blah", ComparatorOrder.ASCENDING));
estream = new ExceptionStream(pstream);
t = getTuple(estream);
- assert(t.EOF);
- assert(t.EXCEPTION);
- assert(t.getException().contains("sort param field can't be found: blah"));
+ assertTrue(t.EOF);
+ assertTrue(t.EXCEPTION);
+ assertTrue(t.getException().contains("sort param field can't be found: blah"));
//Test an error that originates from the /export handler
@@ -539,10 +539,10 @@ public class StreamingTest extends SolrCloudTestCase {
pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
estream = new ExceptionStream(pstream);
t = getTuple(estream);
- assert(t.EOF);
- assert(t.EXCEPTION);
+ assertTrue(t.EOF);
+ assertTrue(t.EXCEPTION);
//The /export handler will pass through a real exception.
- assert(t.getException().contains("undefined field:"));
+ assertTrue(t.getException().contains("undefined field:"));
}
@Test
@@ -577,7 +577,7 @@ public class StreamingTest extends SolrCloudTestCase {
List<Tuple> tuples = getTuples(statsStream);
- assert(tuples.size() == 1);
+ assertEquals(1, tuples.size());
//Test Long and Double Sums
@@ -593,15 +593,15 @@ public class StreamingTest extends SolrCloudTestCase {
Double avgf = tuple.getDouble("avg(a_f)");
Double count = tuple.getDouble("count(*)");
- assertTrue(sumi.longValue() == 70);
- assertTrue(sumf.doubleValue() == 55.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 7.0D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 10);
+ assertEquals(70, sumi.longValue());
+ assertEquals(55.0, sumf.doubleValue(), 0.01);
+ assertEquals(0.0, mini.doubleValue(), 0.01);
+ assertEquals(1.0, minf.doubleValue(), 0.01);
+ assertEquals(14.0, maxi.doubleValue(), 0.01);
+ assertEquals(10.0, maxf.doubleValue(), 0.01);
+ assertEquals(7.0, avgi.doubleValue(), .01);
+ assertEquals(5.5, avgf.doubleValue(), .001);
+ assertEquals(10, count.doubleValue(), .01);
}
@@ -658,16 +658,16 @@ public class StreamingTest extends SolrCloudTestCase {
Double avgf = tuple.getDouble("avg(a_f)");
Double count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11.0, sumf.doubleValue(), 0.01);
+ assertEquals(4.0, mini.doubleValue(), 0.01);
+ assertEquals(4.0, minf.doubleValue(), 0.01);
+ assertEquals(11.0, maxi.doubleValue(), 0.01);
+ assertEquals(7.0, maxf.doubleValue(), 0.01);
+ assertEquals(7.5, avgi.doubleValue(), 0.01);
+ assertEquals(5.5, avgf.doubleValue(), 0.01);
+ assertEquals(2, count.doubleValue(), 0.01);
tuple = tuples.get(1);
bucket = tuple.getString("a_s");
@@ -681,16 +681,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), .01);
+ assertEquals(18, sumf.doubleValue(), .01);
+ assertEquals(0.0, mini.doubleValue(), .01);
+ assertEquals(1.0, minf.doubleValue(), .01);
+ assertEquals(14.0, maxi.doubleValue(), .01);
+ assertEquals(10.0, maxf.doubleValue(), .01);
+ assertEquals(4.25, avgi.doubleValue(), .01);
+ assertEquals(4.5, avgf.doubleValue(), .01);
+ assertEquals(4, count.doubleValue(), .01);
tuple = tuples.get(2);
bucket = tuple.getString("a_s");
@@ -704,16 +704,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello3"));
- assertTrue(sumi.doubleValue() == 38.0D);
- assertTrue(sumf.doubleValue() == 26.0D);
- assertTrue(mini.doubleValue() == 3.0D);
- assertTrue(minf.doubleValue() == 3.0D);
- assertTrue(maxi.doubleValue() == 13.0D);
- assertTrue(maxf.doubleValue() == 9.0D);
- assertTrue(avgi.doubleValue() == 9.5D);
- assertTrue(avgf.doubleValue() == 6.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello3", bucket);
+ assertEquals(38.0, sumi.doubleValue(), 0.01);
+ assertEquals(26.0, sumf.doubleValue(), 0.01);
+ assertEquals(3.0, mini.doubleValue(), 0.01);
+ assertEquals(3.0, minf.doubleValue(), 0.01);
+ assertEquals(13.0, maxi.doubleValue(), 0.01);
+ assertEquals(9.0, maxf.doubleValue(), 0.01);
+ assertEquals(9.5, avgi.doubleValue(), 0.01);
+ assertEquals(6.5, avgf.doubleValue(), 0.01);
+ assertEquals(4, count.doubleValue(), 0.01);
//Reverse the Sort.
@@ -724,7 +724,7 @@ public class StreamingTest extends SolrCloudTestCase {
tuples = getTuples(facetStream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
//Test Long and Double Sums
@@ -740,16 +740,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello3"));
- assertTrue(sumi.doubleValue() == 38.0D);
- assertTrue(sumf.doubleValue() == 26.0D);
- assertTrue(mini.doubleValue() == 3.0D);
- assertTrue(minf.doubleValue() == 3.0D);
- assertTrue(maxi.doubleValue() == 13.0D);
- assertTrue(maxf.doubleValue() == 9.0D);
- assertTrue(avgi.doubleValue() == 9.5D);
- assertTrue(avgf.doubleValue() == 6.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello3", bucket);
+ assertEquals(38, sumi.doubleValue(), 0.1);
+ assertEquals(26, sumf.doubleValue(), 0.1);
+ assertEquals(3, mini.doubleValue(), 0.1);
+ assertEquals(3, minf.doubleValue(), 0.1);
+ assertEquals(13, maxi.doubleValue(), 0.1);
+ assertEquals(9, maxf.doubleValue(), 0.1);
+ assertEquals(9.5, avgi.doubleValue(), 0.1);
+ assertEquals(6.5, avgf.doubleValue(), 0.1);
+ assertEquals(4, count.doubleValue(), 0.1);
tuple = tuples.get(1);
bucket = tuple.getString("a_s");
@@ -763,16 +763,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), 0.01);
+ assertEquals(18, sumf.doubleValue(), 0.01);
+ assertEquals(0, mini.doubleValue(), 0.01);
+ assertEquals(1, minf.doubleValue(), 0.01);
+ assertEquals(14, maxi.doubleValue(), 0.01);
+ assertEquals(10, maxf.doubleValue(), 0.01);
+ assertEquals(4.25, avgi.doubleValue(), 0.01);
+ assertEquals(4.5, avgf.doubleValue(), 0.01);
+ assertEquals(4, count.doubleValue(), 0.01);
tuple = tuples.get(2);
bucket = tuple.getString("a_s");
@@ -786,16 +786,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11, sumf.doubleValue(), 0.01);
+ assertEquals(4.0, mini.doubleValue(), 0.01);
+ assertEquals(4.0, minf.doubleValue(), 0.01);
+ assertEquals(11.0, maxi.doubleValue(), 0.01);
+ assertEquals(7.0, maxf.doubleValue(), 0.01);
+ assertEquals(7.5, avgi.doubleValue(), 0.01);
+ assertEquals(5.5, avgf.doubleValue(), 0.01);
+ assertEquals(2, count.doubleValue(), 0.01);
//Test index sort
@@ -807,7 +807,7 @@ public class StreamingTest extends SolrCloudTestCase {
tuples = getTuples(facetStream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
tuple = tuples.get(0);
@@ -823,16 +823,16 @@ public class StreamingTest extends SolrCloudTestCase {
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11, sumf.doubleValue(), 0.01);
+ assertEquals(4, mini.doubleValue(), 0.01);
+ assertEquals(4, minf.doubleValue(), 0.01);
+ assertEquals(11, maxi.doubleValue(), 0.01);
+ assertEquals(7, maxf.doubleValue(), 0.01);
+ assertEquals(7.5, avgi.doubleValue(), 0.01);
+ assertEquals(5.5, avgf.doubleValue(), 0.01);
+ assertEquals(2, count.doubleValue(), 0.01);
tuple = tuples.get(1);
bucket = tuple.getString("a_s");
@@ -869,16 +869,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), 0.01);
+ assertEquals(18, sumf.doubleValue(), 0.01);
+ assertEquals(0, mini.doubleValue(), 0.01);
+ assertEquals(1, minf.doubleValue(), 0.01);
+ assertEquals(14, maxi.doubleValue(), 0.01);
+ assertEquals(10, maxf.doubleValue(), 0.01);
+ assertEquals(4.25, avgi.doubleValue(), 0.01);
+ assertEquals(4.5, avgf.doubleValue(), 0.01);
+ assertEquals(4, count.doubleValue(), 0.01);
//Test index sort
@@ -888,7 +888,7 @@ public class StreamingTest extends SolrCloudTestCase {
tuples = getTuples(facetStream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
tuple = tuples.get(0);
bucket = tuple.getString("a_s");
@@ -902,16 +902,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), 0.01);
+ assertEquals(18, sumf.doubleValue(), 0.01);
+ assertEquals(0, mini.doubleValue(), 0.01);
+ assertEquals(1, minf.doubleValue(), 0.01);
+ assertEquals(14, maxi.doubleValue(), 0.01);
+ assertEquals(10, maxf.doubleValue(), 0.01);
+ assertEquals(4.25, avgi.doubleValue(), 0.0001);
+ assertEquals(4.5, avgf.doubleValue(), 0.001);
+ assertEquals(4, count.doubleValue(), 0.01);
tuple = tuples.get(1);
bucket = tuple.getString("a_s");
@@ -925,16 +925,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello3"));
- assertTrue(sumi.doubleValue() == 38.0D);
- assertTrue(sumf.doubleValue() == 26.0D);
- assertTrue(mini.doubleValue() == 3.0D);
- assertTrue(minf.doubleValue() == 3.0D);
- assertTrue(maxi.doubleValue() == 13.0D);
- assertTrue(maxf.doubleValue() == 9.0D);
- assertTrue(avgi.doubleValue() == 9.5D);
- assertTrue(avgf.doubleValue() == 6.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello3", bucket);
+ assertEquals(38, sumi.doubleValue(), 0.01);
+ assertEquals(26, sumf.doubleValue(), 0.01);
+ assertEquals(3, mini.doubleValue(), 0.01);
+ assertEquals(3, minf.doubleValue(), 0.01);
+ assertEquals(13, maxi.doubleValue(), 0.01);
+ assertEquals(9, maxf.doubleValue(), 0.01);
+ assertEquals(9.5, avgi.doubleValue(), 0.01);
+ assertEquals(6.5, avgf.doubleValue(), 0.01);
+ assertEquals(4, count.doubleValue(), 0.01);
tuple = tuples.get(2);
bucket = tuple.getString("a_s");
@@ -948,16 +948,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11.0, sumf.doubleValue(), 0.1);
+ assertEquals(4.0, mini.doubleValue(), 0.1);
+ assertEquals(4.0, minf.doubleValue(), 0.1);
+ assertEquals(11.0, maxi.doubleValue(), 0.1);
+ assertEquals(7.0, maxf.doubleValue(), 0.1);
+ assertEquals(7.5, avgi.doubleValue(), 0.1);
+ assertEquals(5.5, avgf.doubleValue(), 0.1);
+ assertEquals(2, count.doubleValue(), 0.1);
}
@@ -996,7 +996,7 @@ public class StreamingTest extends SolrCloudTestCase {
100);
List<Tuple> tuples = getTuples(facetStream);
- assert(tuples.size() == 6);
+ assertEquals(6, tuples.size());
Tuple tuple = tuples.get(0);
String bucket1 = tuple.getString("level1_s");
@@ -1004,10 +1004,10 @@ public class StreamingTest extends SolrCloudTestCase {
Double sumi = tuple.getDouble("sum(a_i)");
Double count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello3"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 35);
- assertTrue(count.doubleValue() == 3);
+ assertEquals("hello3", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(35, sumi.longValue());
+ assertEquals(3, count, 0.1);
tuple = tuples.get(1);
bucket1 = tuple.getString("level1_s");
@@ -1015,10 +1015,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello0"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello0", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(15, sumi.longValue());
+ assertEquals(2, count, 0.1);
tuple = tuples.get(2);
bucket1 = tuple.getString("level1_s");
@@ -1026,10 +1026,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello4"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 11);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello4", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(11, sumi.longValue());
+ assertEquals(1, count.doubleValue(), 0.1);
tuple = tuples.get(3);
bucket1 = tuple.getString("level1_s");
@@ -1037,10 +1037,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello4"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 4);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello4", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(4, sumi.longValue());
+ assertEquals(1, count.doubleValue(), 0.1);
tuple = tuples.get(4);
bucket1 = tuple.getString("level1_s");
@@ -1048,10 +1048,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello3"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 3);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello3", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(3, sumi.longValue());
+ assertEquals(1, count.doubleValue(), 0.1);
tuple = tuples.get(5);
bucket1 = tuple.getString("level1_s");
@@ -1059,10 +1059,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello0"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 2);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello0", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(2, sumi.longValue());
+ assertEquals(2, count.doubleValue(), 0.1);
sorts[0] = new FieldComparator("level1_s", ComparatorOrder.DESCENDING );
sorts[1] = new FieldComparator("level2_s", ComparatorOrder.DESCENDING );
@@ -1076,7 +1076,7 @@ public class StreamingTest extends SolrCloudTestCase {
100);
tuples = getTuples(facetStream);
- assert(tuples.size() == 6);
+ assertEquals(6, tuples.size());
tuple = tuples.get(0);
bucket1 = tuple.getString("level1_s");
@@ -1084,10 +1084,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello4"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 11);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello4", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(11, sumi.longValue());
+ assertEquals(1, count, 0.1);
tuple = tuples.get(1);
bucket1 = tuple.getString("level1_s");
@@ -1095,10 +1095,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello4"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 4);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello4", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(4, sumi.longValue());
+ assertEquals(1, count.doubleValue(), 0.1);
tuple = tuples.get(2);
bucket1 = tuple.getString("level1_s");
@@ -1106,10 +1106,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello3"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 35);
- assertTrue(count.doubleValue() == 3);
+ assertEquals("hello3", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(35, sumi.longValue());
+ assertEquals(3, count.doubleValue(), 0.1);
tuple = tuples.get(3);
bucket1 = tuple.getString("level1_s");
@@ -1117,10 +1117,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello3"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 3);
- assertTrue(count.doubleValue() == 1);
+ assertEquals("hello3", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(3, sumi.longValue());
+ assertEquals(1, count.doubleValue(), 0.1);
tuple = tuples.get(4);
bucket1 = tuple.getString("level1_s");
@@ -1128,10 +1128,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello0"));
- assertTrue(bucket2.equals("b"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello0", bucket1);
+ assertEquals("b", bucket2);
+ assertEquals(15, sumi.longValue());
+ assertEquals(2, count.doubleValue(), 0.1);
tuple = tuples.get(5);
bucket1 = tuple.getString("level1_s");
@@ -1139,10 +1139,10 @@ public class StreamingTest extends SolrCloudTestCase {
sumi = tuple.getDouble("sum(a_i)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket1.equals("hello0"));
- assertTrue(bucket2.equals("a"));
- assertTrue(sumi.longValue() == 2);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello0", bucket1);
+ assertEquals("a", bucket2);
+ assertEquals(2, sumi.longValue());
+ assertEquals(2, count.doubleValue(), 0.1);
}
@@ -1197,16 +1197,16 @@ public class StreamingTest extends SolrCloudTestCase {
Double count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), 0.001);
+ assertEquals(18, sumf.doubleValue(), 0.001);
+ assertEquals(0, mini.doubleValue(), 0.001);
+ assertEquals(1, minf.doubleValue(), 0.001);
+ assertEquals(14, maxi.doubleValue(), 0.001);
+ assertEquals(10, maxf.doubleValue(), 0.001);
+ assertEquals(4.25, avgi.doubleValue(), 0.001);
+ assertEquals(4.5, avgf.doubleValue(), 0.001);
+ assertEquals(4, count.doubleValue(), 0.001);
tuple = tuples.get(1);
@@ -1221,16 +1221,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello3"));
- assertTrue(sumi.doubleValue() == 38.0D);
- assertTrue(sumf.doubleValue() == 26.0D);
- assertTrue(mini.doubleValue() == 3.0D);
- assertTrue(minf.doubleValue() == 3.0D);
- assertTrue(maxi.doubleValue() == 13.0D);
- assertTrue(maxf.doubleValue() == 9.0D);
- assertTrue(avgi.doubleValue() == 9.5D);
- assertTrue(avgf.doubleValue() == 6.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello3", bucket);
+ assertEquals(38, sumi.doubleValue(), 0.001);
+ assertEquals(26, sumf.doubleValue(), 0.001);
+ assertEquals(3, mini.doubleValue(), 0.001);
+ assertEquals(3, minf.doubleValue(), 0.001);
+ assertEquals(13, maxi.doubleValue(), 0.001);
+ assertEquals(9, maxf.doubleValue(), 0.001);
+ assertEquals(9.5, avgi.doubleValue(), 0.001);
+ assertEquals(6.5, avgf.doubleValue(), 0.001);
+ assertEquals(4, count.doubleValue(), 0.001);
tuple = tuples.get(2);
@@ -1245,16 +1245,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11, sumf.doubleValue(), 0.01);
+ assertEquals(4, mini.doubleValue(), 0.01);
+ assertEquals(4, minf.doubleValue(), 0.01);
+ assertEquals(11, maxi.doubleValue(), 0.01);
+ assertEquals(7, maxf.doubleValue(), 0.01);
+ assertEquals(7.5, avgi.doubleValue(), 0.01);
+ assertEquals(5.5, avgf.doubleValue(), 0.01);
+ assertEquals(2, count.doubleValue(), 0.01);
//Test will null value in the grouping field
@@ -1280,9 +1280,9 @@ public class StreamingTest extends SolrCloudTestCase {
rollupStream = new RollupStream(stream, buckets1, metrics1);
tuples = getTuples(rollupStream);
//Check that we've got the extra NULL bucket
- assert(tuples.size() == 4);
+ assertEquals(4, tuples.size());
tuple = tuples.get(0);
- assert(tuple.getString("a_s").equals("NULL"));
+ assertEquals("NULL", tuple.getString("a_s"));
sumi = tuple.getDouble("sum(a_i)");
sumf = tuple.getDouble("sum(a_f)");
@@ -1294,15 +1294,15 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(sumi.doubleValue() == 14.0D);
- assertTrue(sumf.doubleValue() == 10.0D);
- assertTrue(mini.doubleValue() == 14.0D);
- assertTrue(minf.doubleValue() == 10.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 14.0D);
- assertTrue(avgf.doubleValue() == 10.0D);
- assertTrue(count.doubleValue() == 1);
+ assertEquals(14, sumi.doubleValue(), 0.01);
+ assertEquals(10, sumf.doubleValue(), 0.01);
+ assertEquals(14, mini.doubleValue(), 0.01);
+ assertEquals(10, minf.doubleValue(), 0.01);
+ assertEquals(14, maxi.doubleValue(), 0.01);
+ assertEquals(10, maxf.doubleValue(), 0.01);
+ assertEquals(14, avgi.doubleValue(), 0.01);
+ assertEquals(10, avgf.doubleValue(), 0.01);
+ assertEquals(1, count.doubleValue(), 0.01);
}
@@ -1412,7 +1412,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(parallelStream);
List<Tuple> tuples = getTuples(parallelStream);
- assert(tuples.size() == 3);
+ assertEquals(3, tuples.size());
//Test Long and Double Sums
@@ -1428,16 +1428,16 @@ public class StreamingTest extends SolrCloudTestCase {
Double avgf = tuple.getDouble("avg(a_f)");
Double count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello0"));
- assertTrue(sumi.doubleValue() == 17.0D);
- assertTrue(sumf.doubleValue() == 18.0D);
- assertTrue(mini.doubleValue() == 0.0D);
- assertTrue(minf.doubleValue() == 1.0D);
- assertTrue(maxi.doubleValue() == 14.0D);
- assertTrue(maxf.doubleValue() == 10.0D);
- assertTrue(avgi.doubleValue() == 4.25D);
- assertTrue(avgf.doubleValue() == 4.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello0", bucket);
+ assertEquals(17, sumi.doubleValue(), 0.001);
+ assertEquals(18, sumf.doubleValue(), 0.001);
+ assertEquals(0, mini.doubleValue(), 0.001);
+ assertEquals(1, minf.doubleValue(), 0.001);
+ assertEquals(14, maxi.doubleValue(), 0.001);
+ assertEquals(10, maxf.doubleValue(), 0.001);
+ assertEquals(4.25, avgi.doubleValue(), 0.001);
+ assertEquals(4.5, avgf.doubleValue(), 0.001);
+ assertEquals(4, count.doubleValue(), 0.001);
tuple = tuples.get(1);
bucket = tuple.getString("a_s");
@@ -1451,16 +1451,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello3"));
- assertTrue(sumi.doubleValue() == 38.0D);
- assertTrue(sumf.doubleValue() == 26.0D);
- assertTrue(mini.doubleValue() == 3.0D);
- assertTrue(minf.doubleValue() == 3.0D);
- assertTrue(maxi.doubleValue() == 13.0D);
- assertTrue(maxf.doubleValue() == 9.0D);
- assertTrue(avgi.doubleValue() == 9.5D);
- assertTrue(avgf.doubleValue() == 6.5D);
- assertTrue(count.doubleValue() == 4);
+ assertEquals("hello3", bucket);
+ assertEquals(38, sumi.doubleValue(), 0.001);
+ assertEquals(26, sumf.doubleValue(), 0.001);
+ assertEquals(3, mini.doubleValue(), 0.001);
+ assertEquals(3, minf.doubleValue(), 0.001);
+ assertEquals(13, maxi.doubleValue(), 0.001);
+ assertEquals(9, maxf.doubleValue(), 0.001);
+ assertEquals(9.5, avgi.doubleValue(), 0.001);
+ assertEquals(6.5, avgf.doubleValue(), 0.001);
+ assertEquals(4, count.doubleValue(), 0.001);
tuple = tuples.get(2);
bucket = tuple.getString("a_s");
@@ -1474,16 +1474,16 @@ public class StreamingTest extends SolrCloudTestCase {
avgf = tuple.getDouble("avg(a_f)");
count = tuple.getDouble("count(*)");
- assertTrue(bucket.equals("hello4"));
- assertTrue(sumi.longValue() == 15);
- assertTrue(sumf.doubleValue() == 11.0D);
- assertTrue(mini.doubleValue() == 4.0D);
- assertTrue(minf.doubleValue() == 4.0D);
- assertTrue(maxi.doubleValue() == 11.0D);
- assertTrue(maxf.doubleValue() == 7.0D);
- assertTrue(avgi.doubleValue() == 7.5D);
- assertTrue(avgf.doubleValue() == 5.5D);
- assertTrue(count.doubleValue() == 2);
+ assertEquals("hello4", bucket);
+ assertEquals(15, sumi.longValue());
+ assertEquals(11, sumf.doubleValue(), 0.001);
+ assertEquals(4, mini.doubleValue(), 0.001);
+ assertEquals(4, minf.doubleValue(), 0.001);
+ assertEquals(11, maxi.doubleValue(), 0.001);
+ assertEquals(7, maxf.doubleValue(), 0.001);
+ assertEquals(7.5, avgi.doubleValue(), 0.001);
+ assertEquals(5.5, avgf.doubleValue(), 0.001);
+ assertEquals(2, count.doubleValue(), 0.001);
}
@@ -1529,26 +1529,27 @@ public class StreamingTest extends SolrCloudTestCase {
Tuple tuple = tuples.get(0);
String s = tuple.getString("a_s");
- assert(s.equals("hello0")) ;
+ assertEquals("hello0", s);
+ ;
long l = tuple.getLong("a_i");
- assert(l == 0);
+ assertEquals(0, l);
double d = tuple.getDouble("a_f");
- assert(d == 5.1);
+ assertEquals(5.1, d, 0.001);
List<String> stringList = tuple.getStrings("s_multi");
- assert(stringList.get(0).equals("a"));
- assert(stringList.get(1).equals("b"));
+ assertEquals("a", stringList.get(0));
+ assertEquals("b", stringList.get(1));
List<Long> longList = tuple.getLongs("i_multi");
- assert(longList.get(0).longValue() == 1);
- assert(longList.get(1).longValue() == 2);
+ assertEquals(1, longList.get(0).longValue());
+ assertEquals(2, longList.get(1).longValue());
List<Double> doubleList = tuple.getDoubles("f_multi");
- assert(doubleList.get(0).doubleValue() == 1.2);
- assert(doubleList.get(1).doubleValue() == 1.3);
+ assertEquals(1.2, doubleList.get(0).doubleValue(), 0.001);
+ assertEquals(1.3, doubleList.get(1).doubleValue(), 0.001);
}
@@ -1573,7 +1574,7 @@ public class StreamingTest extends SolrCloudTestCase {
MergeStream mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.ASCENDING));
List<Tuple> tuples = getTuples(mstream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0,1,2,3,4);
//Test descending
@@ -1586,7 +1587,7 @@ public class StreamingTest extends SolrCloudTestCase {
mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.DESCENDING));
tuples = getTuples(mstream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 4,3,2,1,0);
//Test compound sort
@@ -1600,7 +1601,7 @@ public class StreamingTest extends SolrCloudTestCase {
mstream = new MergeStream(streamA, streamB, new MultipleFieldComparator(new FieldComparator("a_f",ComparatorOrder.ASCENDING),new FieldComparator("a_i",ComparatorOrder.ASCENDING)));
tuples = getTuples(mstream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0,2,1,3,4);
sParamsA = mapParams("q", "id:(2 4 1)", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i desc");
@@ -1612,7 +1613,7 @@ public class StreamingTest extends SolrCloudTestCase {
mstream = new MergeStream(streamA, streamB, new MultipleFieldComparator(new FieldComparator("a_f",ComparatorOrder.ASCENDING),new FieldComparator("a_i",ComparatorOrder.DESCENDING)));
tuples = getTuples(mstream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 2,0,1,3,4);
}
@@ -1645,7 +1646,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
List<Tuple> tuples = getTuples(pstream);
- assert(tuples.size() == 9);
+ assertEquals(9, tuples.size());
assertOrder(tuples, 0,1,2,3,4,7,6,8,9);
//Test descending
@@ -1660,7 +1661,7 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
tuples = getTuples(pstream);
- assert(tuples.size() == 8);
+ assertEquals(8, tuples.size());
assertOrder(tuples, 9,8,6,4,3,2,1,0);
}
@@ -1693,9 +1694,9 @@ public class StreamingTest extends SolrCloudTestCase {
attachStreamFactory(pstream);
List<Tuple> tuples = getTuples(pstream);
- assert(tuples.size() == 9);
+ assertEquals(9, tuples.size());
Map<String, Tuple> eofTuples = pstream.getEofTuples();
- assert(eofTuples.size() == numWorkers); // There should be an EOF Tuple for each worker.
+ assertEquals(numWorkers, eofTuples.size()); // There should be an EOF Tuple for each worker.
}
@@ -1835,21 +1836,21 @@ public class StreamingTest extends SolrCloudTestCase {
Tuple tuple = getTuple(stream); // All I really care about is that all the fields are returned. There's
- assertTrue("Integers should be returned", tuple.getLong("i_sing") == 11L);
- assertTrue("MV should be returned for i_multi", tuple.getLongs("i_multi").get(0) == 12);
- assertTrue("MV should be returned for i_multi", tuple.getLongs("i_multi").get(1) == 13);
+ assertEquals("Integers should be returned", 11, tuple.getLong("i_sing").longValue());
+ assertEquals("MV should be returned for i_multi", 12, tuple.getLongs("i_multi").get(0).longValue());
+ assertEquals("MV should be returned for i_multi", 13, tuple.getLongs("i_multi").get(1).longValue());
- assertTrue("longs should be returned", tuple.getLong("l_sing") == 14L);
- assertTrue("MV should be returned for l_multi", tuple.getLongs("l_multi").get(0) == 15);
- assertTrue("MV should be returned for l_multi", tuple.getLongs("l_multi").get(1) == 16);
+ assertEquals("longs should be returned", 14,tuple.getLong("l_sing").longValue());
+ assertEquals("MV should be returned for l_multi",15, tuple.getLongs("l_multi").get(0).longValue());
+ assertEquals("MV should be returned for l_multi", 16, tuple.getLongs("l_multi").get(1).longValue());
- assertTrue("floats should be returned", tuple.getDouble("f_sing") == 1.7);
- assertTrue("MV should be returned for f_multi", tuple.getDoubles("f_multi").get(0) == 1.8);
- assertTrue("MV should be returned for f_multi", tuple.getDoubles("f_multi").get(1) == 1.9);
+ assertEquals("floats should be returned", 1.7, tuple.getDouble("f_sing").doubleValue(), 0.001);
+ assertEquals("MV should be returned for f_multi", 1.8, tuple.getDoubles("f_multi").get(0).doubleValue(), 0.001);
+ assertEquals("MV should be returned for f_multi", 1.9, tuple.getDoubles("f_multi").get(1).doubleValue(), 0.001);
- assertTrue("doubles should be returned", tuple.getDouble("d_sing") == 1.2);
- assertTrue("MV should be returned for d_multi", tuple.getDoubles("d_multi").get(0) == 1.21);
- assertTrue("MV should be returned for d_multi", tuple.getDoubles("d_multi").get(1) == 1.22);
+ assertEquals("doubles should be returned", 1.2, tuple.getDouble("d_sing").doubleValue(), 0.001);
+ assertEquals("MV should be returned for d_multi", 1.21, tuple.getDoubles("d_multi").get(0).doubleValue(), 0.001);
+ assertEquals("MV should be returned for d_multi", 1.22, tuple.getDoubles("d_multi").get(1).doubleValue(), 0.001);
assertTrue("Strings should be returned", tuple.getString("s_sing").equals("single"));
assertTrue("MV should be returned for s_multi", tuple.getStrings("s_multi").get(0).equals("sm1"));
[12/16] lucene-solr:jira/solr-8593: SOLR-9718: replace assert and
assertTrue() with assertEquals()
Posted by kr...@apache.org.
SOLR-9718: replace assert and assertTrue() with assertEquals()
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/907bed88
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/907bed88
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/907bed88
Branch: refs/heads/jira/solr-8593
Commit: 907bed887d6be1aaec832c4f6d395d051bb49b17
Parents: 7936f74
Author: Noble Paul <no...@apache.org>
Authored: Mon Nov 14 12:41:59 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Nov 14 12:41:59 2016 +0530
----------------------------------------------------------------------
.../client/solrj/io/stream/StreamingTest.java | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/907bed88/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 3888a41..7d6e1d3 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -1718,7 +1718,7 @@ public class StreamingTest extends SolrCloudTestCase {
CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
List<Tuple> tuples = getTuples(stream);
- assert(tuples.size() == 5);
+ assertEquals(5,tuples.size());
assertOrder(tuples, 4, 3, 2, 1, 0);
//With Ascending Sort
@@ -1726,7 +1726,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0,1,2,3,4);
@@ -1735,7 +1735,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert(tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 2,0,1,3,4);
@@ -1743,7 +1743,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert (tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0, 2, 1, 3, 4);
}
@@ -1771,7 +1771,7 @@ public class StreamingTest extends SolrCloudTestCase {
try {
List<Tuple> tuples = getTuples(stream);
- assert (tuples.size() == 5);
+ assertEquals(5, tuples.size());
assertOrder(tuples, 0, 2, 1, 3, 4);
//Basic CloudSolrStream Test bools desc
@@ -1779,7 +1779,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert (tuples.size() == 5);
+ assertEquals (5,tuples.size());
assertOrder(tuples, 4, 3, 1, 2, 0);
//Basic CloudSolrStream Test dates desc
@@ -1787,7 +1787,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert (tuples.size() == 5);
+ assertEquals (5,tuples.size());
assertOrder(tuples, 2, 0, 1, 4, 3);
//Basic CloudSolrStream Test ates desc
@@ -1795,7 +1795,7 @@ public class StreamingTest extends SolrCloudTestCase {
stream = new CloudSolrStream(zkHost, COLLECTION, sParams);
tuples = getTuples(stream);
- assert (tuples.size() == 5);
+ assertEquals (5,tuples.size());
assertOrder(tuples, 3, 4, 1, 0, 2);
} finally {
if (stream != null) {
@@ -1840,8 +1840,8 @@ public class StreamingTest extends SolrCloudTestCase {
assertEquals("MV should be returned for i_multi", 12, tuple.getLongs("i_multi").get(0).longValue());
assertEquals("MV should be returned for i_multi", 13, tuple.getLongs("i_multi").get(1).longValue());
- assertEquals("longs should be returned", 14,tuple.getLong("l_sing").longValue());
- assertEquals("MV should be returned for l_multi",15, tuple.getLongs("l_multi").get(0).longValue());
+ assertEquals("longs should be returned", 14, tuple.getLong("l_sing").longValue());
+ assertEquals("MV should be returned for l_multi", 15, tuple.getLongs("l_multi").get(0).longValue());
assertEquals("MV should be returned for l_multi", 16, tuple.getLongs("l_multi").get(1).longValue());
assertEquals("floats should be returned", 1.7, tuple.getDouble("f_sing").doubleValue(), 0.001);
[15/16] lucene-solr:jira/solr-8593: SOLR-9166: Export handler returns
zero for numeric fields that are not in the original doc
Posted by kr...@apache.org.
SOLR-9166: Export handler returns zero for numeric fields that are not in the original doc
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4a31b29c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4a31b29c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4a31b29c
Branch: refs/heads/jira/solr-8593
Commit: 4a31b29cb031a10d25de01e25d1d9e5b1a4a7787
Parents: fba2a86
Author: Erick Erickson <er...@apache.org>
Authored: Fri Nov 11 13:11:20 2016 -0800
Committer: Erick Erickson <er...@apache.org>
Committed: Mon Nov 14 07:19:28 2016 -0800
----------------------------------------------------------------------
solr/CHANGES.txt | 7 +
.../org/apache/solr/handler/ExportWriter.java | 10 +-
.../solrj/io/stream/StreamExpressionTest.java | 2 +-
.../client/solrj/io/stream/StreamingTest.java | 244 +++++++++++++++++++
4 files changed, 257 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a31b29c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index efd1c94..f48b1ef 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -79,6 +79,13 @@ Jetty 9.3.8.v20160314
Detailed Change List
----------------------
+Upgrade Notes
+----------------------
+
+* SOLR-9166: Export handler returns zero for numeric fields that are not in the original doc. One
+ consequence of this change is that you must be aware that some tuples will not have values if
+ there were none in the original document.
+
New Features
----------------------
* SOLR-9293: Solrj client support for hierarchical clusters and other topics
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a31b29c/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
index 98ab22f..52010ce 100644
--- a/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
@@ -1333,7 +1333,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
if (vals.advance(docId) == docId) {
val = (int) vals.longValue();
} else {
- val = 0;
+ return false;
}
ew.put(this.field, val);
return true;
@@ -1385,7 +1385,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
if (vals.advance(docId) == docId) {
val = vals.longValue();
} else {
- val = 0;
+ return false;
}
ew.put(field, val);
return true;
@@ -1405,7 +1405,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
if (vals.advance(docId) == docId) {
val = vals.longValue();
} else {
- val = 0;
+ return false;
}
ew.put(this.field, new Date(val));
return true;
@@ -1449,7 +1449,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
if (vals.advance(docId) == docId) {
val = (int)vals.longValue();
} else {
- val = 0;
+ return false;
}
ew.put(this.field, Float.intBitsToFloat(val));
return true;
@@ -1469,7 +1469,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
if (vals.advance(docId) == docId) {
val = vals.longValue();
} else {
- val = 0;
+ return false;
}
ew.put(this.field, Double.longBitsToDouble(val));
return true;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a31b29c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 106368e..d447210 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -392,7 +392,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertTrue("hello4".equals(tuple.getString("a_s")));
assertNull(tuple.get("s_multi"));
assertNull(tuple.get("i_multi"));
- assertEquals(0L, (long)tuple.getLong("a_i"));
+ assertNull(tuple.getLong("a_i"));
tuple = tuples.get(1);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a31b29c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 7d6e1d3..7a33a10 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -19,12 +19,18 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Date;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
@@ -42,8 +48,10 @@ import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.junit.Before;
@@ -961,6 +969,242 @@ public class StreamingTest extends SolrCloudTestCase {
}
+
+ String[] docPairs(int base, String sSeq) {
+ List<String> pairs = new ArrayList<>();
+ final int iSeq = base * 100;
+ pairs.add(id);
+ pairs.add(sSeq + base); // aaa1
+ pairs.add("s_sing");
+ pairs.add(Integer.toString(iSeq + 1)); // 101
+ pairs.add("i_sing");
+ pairs.add(Integer.toString(iSeq + 2)); // 102
+ pairs.add("f_sing");
+ pairs.add(Float.toString(iSeq + 3)); // 103.0
+ pairs.add("l_sing");
+ pairs.add(Long.toString(iSeq + 4)); // 104
+ pairs.add("d_sing");
+ pairs.add(Double.toString(iSeq + 5)); // 105
+ pairs.add("dt_sing");
+ pairs.add(String.format("2000-01-01T%02d:00:00Z", base)); // Works as long as we add fewer than 60 docs
+ pairs.add("b_sing");
+ pairs.add((base % 2) == 0 ? "T" : "F"); // Tricky
+
+ String[] ret = new String[pairs.size()];
+ return pairs.toArray(ret);
+ }
+
+ // Select and export should be identical sort orders I think.
+ private void checkSort(JettySolrRunner jetty, String field, String sortDir, String[] fields) throws IOException, SolrServerException {
+
+ // Comes back after LUCENE-7548
+// SolrQuery query = new SolrQuery("*:*");
+// query.addSort(field, ("asc".equals(sortDir) ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc));
+// query.addSort("id", SolrQuery.ORDER.asc);
+// query.addField("id");
+// query.addField(field);
+// query.setRequestHandler("standard");
+// query.setRows(100);
+//
+// List<String> selectOrder = new ArrayList<>();
+//
+// String url = jetty.getBaseUrl() + "/" + COLLECTION;
+//
+// try (HttpSolrClient client = getHttpSolrClient(url)) {
+// client.setConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT);
+// QueryResponse rsp = client.query(query);
+// for (SolrDocument doc : rsp.getResults()) {
+// selectOrder.add((String) doc.getFieldValue("id"));
+// }
+// }
+// SolrParams exportParams = mapParams("q", "*:*", "qt", "/export", "fl", "id," + field, "sort", field + " " + sortDir + ",id asc");
+// try (CloudSolrStream solrStream = new CloudSolrStream(zkHost, COLLECTION, exportParams)) {
+// List<Tuple> tuples = getTuples(solrStream);
+// assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
+// // Since the getTuples method doesn't return the EOF tuple, these two entries should be the same size.
+// assertEquals("Tuple count should exactly match sort array size for field " + field + " sort order " + sortDir, selectOrder.size(), tuples.size());
+//
+// for (int idx = 0; idx < selectOrder.size(); ++idx) { // Tuples should be in lock step with the orders from select.
+// assertEquals("Order for missing docValues fields wrong for field '" + field + "' sort direction '" + sortDir,
+// tuples.get(idx).getString("id"), selectOrder.get(idx));
+// }
+// }
+
+ // Remove below and uncomment above after LUCENE-7548
+ List<String> selectOrder = ("asc".equals(sortDir)) ? Arrays.asList(ascOrder) : Arrays.asList(descOrder);
+ List<String> selectOrderBool = ("asc".equals(sortDir)) ? Arrays.asList(ascOrderBool) : Arrays.asList(descOrderBool);
+ SolrParams exportParams = mapParams("q", "*:*", "qt", "/export", "fl", "id," + field, "sort", field + " " + sortDir + ",id asc");
+ try (CloudSolrStream solrStream = new CloudSolrStream(zkHost, COLLECTION, exportParams)) {
+ List<Tuple> tuples = getTuples(solrStream);
+ assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
+ // Since the getTuples method doesn't return the EOF tuple, these two entries should be the same size.
+ assertEquals("Tuple count should exactly match sort array size for field " + field + " sort order " + sortDir, selectOrder.size(), tuples.size());
+
+ for (int idx = 0; idx < selectOrder.size(); ++idx) { // Tuples should be in lock step with the orders passed in.
+ assertEquals("Order for missing docValues fields wrong for field '" + field + "' sort direction '" + sortDir +
+ "' RESTORE GETTING selectOrder from select statement after LUCENE-7548",
+ tuples.get(idx).getString("id"), (field.startsWith("b_") ? selectOrderBool.get(idx) : selectOrder.get(idx)));
+ }
+ }
+ }
+
+ static final String[] voidIds = new String[]{
+ "iii1",
+ "eee1",
+ "aaa1",
+ "ooo1",
+ "iii2",
+ "eee2",
+ "aaa2",
+ "ooo2",
+ "iii3",
+ "eee3",
+ "aaa3",
+ "ooo3"
+ };
+
+ private void checkReturnValsForEmpty(String[] fields) throws IOException {
+
+ Set<String> voids = new HashSet<>(Arrays.asList(voidIds));
+
+ StringBuilder fl = new StringBuilder("id");
+ for (String f : fields) {
+ fl.append(",").append(f);
+ }
+ SolrParams sParams = mapParams("q", "*:*", "qt", "/export", "fl", fl.toString(), "sort", "id asc");
+
+ try (CloudSolrStream solrStream = new CloudSolrStream(zkHost, COLLECTION, sParams)) {
+ List<Tuple> tuples = getTuples(solrStream);
+ assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
+
+ for (Tuple tuple : tuples) {
+ String id = tuple.getString("id");
+ if (voids.contains(id)) {
+ for (String f : fields) {
+ assertNull("Should have returned a void for field " + f + " doc " + id, tuple.get(f));
+ }
+ } else {
+ for (String f : fields) {
+ assertNotNull("Should have returned a value for field " + f + " doc " + id, tuple.get(f));
+ }
+ }
+ }
+ }
+ }
+
+ // Goes away after LUCENE-7548
+ final static String[] ascOrder = new String[]{
+ "aaa1", "aaa2", "aaa3", "eee1",
+ "eee2", "eee3", "iii1", "iii2",
+ "iii3", "ooo1", "ooo2", "ooo3",
+ "aaa4", "eee4", "iii4", "ooo4",
+ "aaa5", "eee5", "iii5", "ooo5",
+ "aaa6", "eee6", "iii6", "ooo6",
+ "aaa7", "eee7", "iii7", "ooo7",
+ "aaa8", "eee8", "iii8", "ooo8"
+ };
+
+ // Goes away after LUCENE-7548
+ final static String[] descOrder = new String[]{
+ "aaa8", "eee8", "iii8", "ooo8",
+ "aaa7", "eee7", "iii7", "ooo7",
+ "aaa6", "eee6", "iii6", "ooo6",
+ "aaa5", "eee5", "iii5", "ooo5",
+ "aaa4", "eee4", "iii4", "ooo4",
+ "aaa1", "aaa2", "aaa3", "eee1",
+ "eee2", "eee3", "iii1", "iii2",
+ "iii3", "ooo1", "ooo2", "ooo3"
+ };
+
+
+ // Goes away after LUCENE-7548
+ final static String[] ascOrderBool = new String[]{
+ "aaa1", "aaa2", "aaa3", "eee1",
+ "eee2", "eee3", "iii1", "iii2",
+ "iii3", "ooo1", "ooo2", "ooo3",
+ "aaa5", "aaa7", "eee5", "eee7",
+ "iii5", "iii7", "ooo5", "ooo7",
+ "aaa4", "aaa6", "aaa8", "eee4",
+ "eee6", "eee8", "iii4", "iii6",
+ "iii8", "ooo4", "ooo6", "ooo8"
+ };
+
+ // Goes away after LUCENE-7548
+ final static String[] descOrderBool = new String[]{
+ "aaa4", "aaa6", "aaa8", "eee4",
+ "eee6", "eee8", "iii4", "iii6",
+ "iii8", "ooo4", "ooo6", "ooo8",
+ "aaa5", "aaa7", "eee5", "eee7",
+ "iii5", "iii7", "ooo5", "ooo7",
+ "aaa1", "aaa2", "aaa3", "eee1",
+ "eee2", "eee3", "iii1", "iii2",
+ "iii3", "ooo1", "ooo2", "ooo3",
+ };
+
+ @Test
+ public void testMissingFields() throws Exception {
+
+ new UpdateRequest()
+ // Some docs with nothing at all for any of the "interesting" fields.
+ .add(id, "iii1")
+ .add(id, "eee1")
+ .add(id, "aaa1")
+ .add(id, "ooo1")
+
+ .add(id, "iii2")
+ .add(id, "eee2")
+ .add(id, "aaa2")
+ .add(id, "ooo2")
+
+ .add(id, "iii3")
+ .add(id, "eee3")
+ .add(id, "aaa3")
+ .add(id, "ooo3")
+
+ // Docs with values in for all of the types we want to sort on.
+
+ .add(docPairs(4, "iii"))
+ .add(docPairs(4, "eee"))
+ .add(docPairs(4, "aaa"))
+ .add(docPairs(4, "ooo"))
+
+ .add(docPairs(5, "iii"))
+ .add(docPairs(5, "eee"))
+ .add(docPairs(5, "aaa"))
+ .add(docPairs(5, "ooo"))
+
+ .add(docPairs(6, "iii"))
+ .add(docPairs(6, "eee"))
+ .add(docPairs(6, "aaa"))
+ .add(docPairs(6, "ooo"))
+
+ .add(docPairs(7, "iii"))
+ .add(docPairs(7, "eee"))
+ .add(docPairs(7, "aaa"))
+ .add(docPairs(7, "ooo"))
+
+ .add(docPairs(8, "iii"))
+ .add(docPairs(8, "eee"))
+ .add(docPairs(8, "aaa"))
+ .add(docPairs(8, "ooo"))
+
+ .commit(cluster.getSolrClient(), COLLECTION);
+
+ JettySolrRunner jetty = cluster.getJettySolrRunners().get(0);
+
+
+ String[] fields = new String[]{"s_sing", "i_sing", "f_sing", "l_sing", "d_sing", "dt_sing", "b_sing" };
+
+
+ for (String f : fields) {
+ checkSort(jetty, f, "asc", fields);
+ checkSort(jetty, f, "desc", fields);
+ }
+
+ checkReturnValsForEmpty(fields);
+
+ }
+
@Test
public void testSubFacetStream() throws Exception {