You are viewing a plain text version of this content; the canonical (HTML) version of this message contains the original hyperlinks.
Posted to commits@lucene.apache.org by cp...@apache.org on 2016/02/02 10:53:13 UTC
[01/21] lucene-solr git commit: don't test merge stability of point
values: our BKD tree impl is not stable on 1D merge
Repository: lucene-solr
Updated Branches:
refs/heads/master-solr-8621 14d2b0c32 -> 219f7de94
don't test merge stability of point values: our BKD tree impl is not stable on 1D merge
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/27c28b53
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/27c28b53
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/27c28b53
Branch: refs/heads/master-solr-8621
Commit: 27c28b534074edb6e40988aae98a3e51718555a4
Parents: ddbf3a2
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jan 29 09:12:35 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jan 29 09:12:35 2016 -0500
----------------------------------------------------------------------
.../java/org/apache/lucene/index/BasePointFormatTestCase.java | 6 ++++++
1 file changed, 6 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27c28b53/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
index 3b19530..20bdfb5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
@@ -919,4 +919,10 @@ public abstract class BasePointFormatTestCase extends BaseIndexFileFormatTestCas
//dir = FSDirectory.open(createTempDir());
return dir;
}
+
+ @Override
+ public void testMergeStability() {
+ // suppress this test from base class: merges for BKD trees are not stable because the tree created by merge will have a different
+ // structure than the tree created by adding points separately
+ }
}
[09/21] lucene-solr git commit: Merge branch 'master' of
https://git-wip-us.apache.org/repos/asf/lucene-solr
Posted by cp...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a4d15862
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a4d15862
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a4d15862
Branch: refs/heads/master-solr-8621
Commit: a4d15862ac38cc5d7f4406b1fca4cbdaecd33f86
Parents: 13c9912 9332b16
Author: Noble Paul <no...@apache.org>
Authored: Sun Jan 31 18:45:29 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Sun Jan 31 18:45:29 2016 +0530
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +
.../simpletext/SimpleTextPointReader.java | 24 +-
.../simpletext/SimpleTextPointWriter.java | 6 +-
.../simpletext/TestSimpleTextPointFormat.java | 33 +
.../org/apache/lucene/codecs/CodecUtil.java | 4 +-
.../org/apache/lucene/codecs/PointWriter.java | 4 +
.../codecs/lucene50/Lucene50CompoundReader.java | 14 +
.../codecs/lucene60/Lucene60PointFormat.java | 3 +-
.../codecs/lucene60/Lucene60PointReader.java | 64 +-
.../codecs/lucene60/Lucene60PointWriter.java | 87 +-
.../org/apache/lucene/document/DoublePoint.java | 6 +
.../org/apache/lucene/document/FloatPoint.java | 6 +
.../org/apache/lucene/document/IntPoint.java | 6 +
.../org/apache/lucene/document/LongPoint.java | 6 +
.../lucene/index/DefaultIndexingChain.java | 3 +
.../org/apache/lucene/index/IndexReader.java | 4 +-
.../apache/lucene/index/MultiPointValues.java | 4 +-
.../org/apache/lucene/index/PointValues.java | 8 +-
.../apache/lucene/index/SegmentCoreReaders.java | 4 +-
.../lucene60/TestLucene60PointFormat.java | 83 ++
.../org/apache/lucene/index/TestAddIndexes.java | 20 +
.../index/TestAllFilesCheckIndexHeader.java | 147 +++
.../index/TestAllFilesDetectTruncation.java | 131 +++
.../index/TestAllFilesHaveChecksumFooter.java | 24 +-
.../index/TestAllFilesHaveCodecHeader.java | 31 +-
.../apache/lucene/index/TestAtomicUpdate.java | 2 +
.../lucene/index/TestCodecHoldsOpenFiles.java | 5 +
.../index/TestIndexWriterExceptions2.java | 3 +
.../lucene/index/TestIndexWriterOnDiskFull.java | 5 +
.../lucene/index/TestIndexWriterOnVMError.java | 3 +
.../apache/lucene/index/TestPointValues.java | 988 +++----------------
.../lucene/index/TestSwappedIndexFiles.java | 129 +++
.../codecs/asserting/AssertingPointFormat.java | 5 +
.../lucene/codecs/cranky/CrankyCodec.java | 6 +
.../lucene/codecs/cranky/CrankyPointFormat.java | 176 ++++
.../index/BaseIndexFileFormatTestCase.java | 5 +
.../lucene/index/BasePointFormatTestCase.java | 929 +++++++++++++++++
.../org/apache/lucene/util/LuceneTestCase.java | 64 ++
.../asserting/TestAssertingPointFormat.java | 31 +
solr/CHANGES.txt | 5 +
.../org/apache/solr/handler/SQLHandler.java | 18 +-
.../apache/solr/search/BoostQParserPlugin.java | 4 -
.../solr/search/CollapsingQParserPlugin.java | 4 -
.../solr/search/ComplexPhraseQParserPlugin.java | 1 +
.../apache/solr/search/DisMaxQParserPlugin.java | 4 -
.../apache/solr/search/ExportQParserPlugin.java | 3 -
.../search/ExtendedDismaxQParserPlugin.java | 4 -
.../apache/solr/search/FieldQParserPlugin.java | 4 -
.../solr/search/FunctionQParserPlugin.java | 4 -
.../solr/search/FunctionRangeQParserPlugin.java | 4 -
.../apache/solr/search/HashQParserPlugin.java | 4 -
.../apache/solr/search/JoinQParserPlugin.java | 4 -
.../apache/solr/search/LuceneQParserPlugin.java | 4 -
.../apache/solr/search/NestedQParserPlugin.java | 4 -
.../solr/search/OldLuceneQParserPlugin.java | 4 -
.../apache/solr/search/PrefixQParserPlugin.java | 4 -
.../org/apache/solr/search/QParserPlugin.java | 4 +
.../apache/solr/search/RawQParserPlugin.java | 4 -
.../apache/solr/search/ReRankQParserPlugin.java | 3 -
.../apache/solr/search/SimpleQParserPlugin.java | 5 -
.../solr/search/SpatialBoxQParserPlugin.java | 5 -
.../solr/search/SpatialFilterQParserPlugin.java | 5 -
.../solr/search/SurroundQParserPlugin.java | 4 -
.../apache/solr/search/SwitchQParserPlugin.java | 4 -
.../apache/solr/search/TermQParserPlugin.java | 4 -
.../apache/solr/search/TermsQParserPlugin.java | 4 -
.../apache/solr/search/XmlQParserPlugin.java | 4 -
.../join/BlockJoinParentQParserPlugin.java | 4 -
.../solr/search/join/GraphQParserPlugin.java | 4 -
.../org/apache/solr/search/join/GraphQuery.java | 89 +-
.../solr/search/join/GraphTermsCollector.java | 2 +-
.../search/join/ScoreJoinQParserPlugin.java | 4 -
.../solr/search/mlt/MLTQParserPlugin.java | 5 -
.../solr/client/solrj/ConnectionReuseTest.java | 2 +-
.../apache/solr/search/FooQParserPlugin.java | 4 -
.../solr/search/TestAnalyticsQParserPlugin.java | 4 -
.../apache/solr/search/TestRankQueryPlugin.java | 4 -
.../apache/solr/search/join/GraphQueryTest.java | 23 +
.../solr/client/solrj/io/sql/ResultSetImpl.java | 87 +-
.../solr/client/solrj/io/sql/JdbcTest.java | 177 +++-
80 files changed, 2430 insertions(+), 1180 deletions(-)
----------------------------------------------------------------------
[13/21] lucene-solr git commit: .gitignore Benchmark: temp/ & work/
Posted by cp...@apache.org.
.gitignore Benchmark: temp/ & work/
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0fab4ccb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0fab4ccb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0fab4ccb
Branch: refs/heads/master-solr-8621
Commit: 0fab4ccb96b7cc0d167aa4a83a034cedb587c06f
Parents: 4cdce3d
Author: David Smiley <ds...@apache.org>
Authored: Mon Feb 1 00:18:37 2016 -0500
Committer: David Smiley <ds...@apache.org>
Committed: Mon Feb 1 00:18:37 2016 -0500
----------------------------------------------------------------------
lucene/benchmark/.gitignore | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fab4ccb/lucene/benchmark/.gitignore
----------------------------------------------------------------------
diff --git a/lucene/benchmark/.gitignore b/lucene/benchmark/.gitignore
new file mode 100644
index 0000000..6cac9b7
--- /dev/null
+++ b/lucene/benchmark/.gitignore
@@ -0,0 +1,2 @@
+temp/
+work/
\ No newline at end of file
[14/21] lucene-solr git commit: fix test bug,
using different randomness when creating the two IWCs
Posted by cp...@apache.org.
fix test bug, using different randomness when creating the two IWCs
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/15fed60b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/15fed60b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/15fed60b
Branch: refs/heads/master-solr-8621
Commit: 15fed60b050c14e09638ebc7d56df99d24631fc6
Parents: 0fab4cc
Author: Mike McCandless <mi...@apache.org>
Authored: Mon Feb 1 05:26:42 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Mon Feb 1 05:27:20 2016 -0500
----------------------------------------------------------------------
.../org/apache/lucene/index/TestSwappedIndexFiles.java | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15fed60b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
index a9a8c14..c412545 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
import java.io.EOFException;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
@@ -70,7 +71,7 @@ public class TestSwappedIndexFiles extends LuceneTestCase {
private void indexOneDoc(long seed, Directory dir, Document doc, boolean useCFS) throws IOException {
Random random = new Random(seed);
- IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
+ IndexWriterConfig conf = newIndexWriterConfig(random, new MockAnalyzer(random));
conf.setCodec(TestUtil.getDefaultCodec());
if (useCFS == false) {
@@ -87,6 +88,10 @@ public class TestSwappedIndexFiles extends LuceneTestCase {
}
private void swapFiles(Directory dir1, Directory dir2) throws IOException {
+ if (VERBOSE) {
+ System.out.println("TEST: dir1 files: " + Arrays.toString(dir1.listAll()));
+ System.out.println("TEST: dir2 files: " + Arrays.toString(dir2.listAll()));
+ }
for(String name : dir1.listAll()) {
if (name.equals(IndexWriter.WRITE_LOCK_NAME)) {
continue;
@@ -94,8 +99,11 @@ public class TestSwappedIndexFiles extends LuceneTestCase {
swapOneFile(dir1, dir2, name);
}
}
-
+
private void swapOneFile(Directory dir1, Directory dir2, String victim) throws IOException {
+ if (VERBOSE) {
+ System.out.println("TEST: swap file " + victim);
+ }
try (BaseDirectoryWrapper dirCopy = newDirectory()) {
dirCopy.setCheckIndexOnClose(false);
[18/21] lucene-solr git commit: LUCENE-7006: increase
BaseMergePolicyTestCase use (TestNoMergePolicy and TestSortingMergePolicy now
extend it, TestUpgradeIndexMergePolicy added)
Posted by cp...@apache.org.
LUCENE-7006: increase BaseMergePolicyTestCase use (TestNoMergePolicy and TestSortingMergePolicy now extend it, TestUpgradeIndexMergePolicy added)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ce0b931d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ce0b931d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ce0b931d
Branch: refs/heads/master-solr-8621
Commit: ce0b931da890bb62448ae11ed5f9c0cb41017ede
Parents: fce97a6
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Feb 1 17:51:09 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Feb 1 17:51:09 2016 +0000
----------------------------------------------------------------------
lucene/CHANGES.txt | 4 +++
.../apache/lucene/index/TestNoMergePolicy.java | 8 ++++--
.../index/TestUpgradeIndexMergePolicy.java | 26 ++++++++++++++++++++
.../lucene/index/TestSortingMergePolicy.java | 8 ++++--
4 files changed, 42 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce0b931d/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 017742d..db58f4d 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -266,6 +266,10 @@ Other
* LUCENE-7005: TieredMergePolicy tweaks (>= vs. >, @see get vs. set)
(Christine Poerschke)
+* LUCENE-7006: increase BaseMergePolicyTestCase use (TestNoMergePolicy and
+ TestSortingMergePolicy now extend it, TestUpgradeIndexMergePolicy added)
+ (Christine Poerschke)
+
======================= Lucene 5.4.1 =======================
Bug Fixes
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce0b931d/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
index e345ed4..bdd83c6 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
@@ -25,11 +25,15 @@ import java.util.Arrays;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
-public class TestNoMergePolicy extends LuceneTestCase {
+public class TestNoMergePolicy extends BaseMergePolicyTestCase {
+
+ public MergePolicy mergePolicy() {
+ return NoMergePolicy.INSTANCE;
+ }
@Test
public void testNoMergePolicy() throws Exception {
- MergePolicy mp = NoMergePolicy.INSTANCE;
+ MergePolicy mp = mergePolicy();
assertNull(mp.findMerges(null, (SegmentInfos)null, null));
assertNull(mp.findForcedMerges(null, 0, null, null));
assertNull(mp.findForcedDeletesMerges(null, null));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce0b931d/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
new file mode 100644
index 0000000..857bcca
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/index/TestUpgradeIndexMergePolicy.java
@@ -0,0 +1,26 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class TestUpgradeIndexMergePolicy extends BaseMergePolicyTestCase {
+
+ public MergePolicy mergePolicy() {
+ return new UpgradeIndexMergePolicy(newMergePolicy(random()));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce0b931d/lucene/misc/src/test/org/apache/lucene/index/TestSortingMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestSortingMergePolicy.java b/lucene/misc/src/test/org/apache/lucene/index/TestSortingMergePolicy.java
index 8729117..d9baf55 100644
--- a/lucene/misc/src/test/org/apache/lucene/index/TestSortingMergePolicy.java
+++ b/lucene/misc/src/test/org/apache/lucene/index/TestSortingMergePolicy.java
@@ -50,7 +50,7 @@ import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-public class TestSortingMergePolicy extends LuceneTestCase {
+public class TestSortingMergePolicy extends BaseMergePolicyTestCase {
private List<String> terms;
private Directory dir1, dir2;
@@ -78,6 +78,10 @@ public class TestSortingMergePolicy extends LuceneTestCase {
return doc;
}
+ public MergePolicy mergePolicy() {
+ return newSortingMergePolicy(sort);
+ }
+
public static SortingMergePolicy newSortingMergePolicy(Sort sort) {
// usually create a MP with a low merge factor so that many merges happen
MergePolicy mp;
@@ -113,7 +117,7 @@ public class TestSortingMergePolicy extends LuceneTestCase {
final long seed = random().nextLong();
final IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(new Random(seed)));
final IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(new Random(seed)));
- iwc2.setMergePolicy(newSortingMergePolicy(sort));
+ iwc2.setMergePolicy(mergePolicy());
final RandomIndexWriter iw1 = new RandomIndexWriter(new Random(seed), dir1, iwc1);
final RandomIndexWriter iw2 = new RandomIndexWriter(new Random(seed), dir2, iwc2);
for (int i = 0; i < numDocs; ++i) {
[03/21] lucene-solr git commit: try again
Posted by cp...@apache.org.
try again
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c4030838
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c4030838
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c4030838
Branch: refs/heads/master-solr-8621
Commit: c403083872408b6f70f10b999fc2ae706804a672
Parents: 3141c69
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jan 29 09:25:48 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jan 29 09:25:48 2016 -0500
----------------------------------------------------------------------
.../org/apache/lucene/index/BaseIndexFileFormatTestCase.java | 5 +++++
.../java/org/apache/lucene/index/BasePointFormatTestCase.java | 7 +++++++
2 files changed, 12 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c4030838/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index b4b6f7d..c53293c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -195,6 +195,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
/** The purpose of this test is to make sure that bulk merge doesn't accumulate useless data over runs. */
public void testMergeStability() throws Exception {
+ assumeTrue("merge is not stable", mergeIsStable());
Directory dir = newDirectory();
if (dir instanceof MockDirectoryWrapper) {
// Else, the virus checker may prevent deletion of files and cause
@@ -240,6 +241,10 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
dir2.close();
}
+ protected boolean mergeIsStable() {
+ return true;
+ }
+
/** Test the accuracy of the ramBytesUsed estimations. */
@Slow
public void testRamBytesUsed() throws IOException {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c4030838/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
index 3b19530..2b88d74 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
@@ -919,4 +919,11 @@ public abstract class BasePointFormatTestCase extends BaseIndexFileFormatTestCas
//dir = FSDirectory.open(createTempDir());
return dir;
}
+
+ @Override
+ protected boolean mergeIsStable() {
+ // suppress this test from base class: merges for BKD trees are not stable because the tree created by merge will have a different
+ // structure than the tree created by adding points separately
+ return false;
+ }
}
[16/21] lucene-solr git commit: SOLR-8607: The Schema API refuses to
add new fields that match existing dynamic fields
Posted by cp...@apache.org.
SOLR-8607: The Schema API refuses to add new fields that match existing dynamic fields
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/064c0ac0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/064c0ac0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/064c0ac0
Branch: refs/heads/master-solr-8621
Commit: 064c0ac00fed662183c4cb117f6aeb6d0f9fe1a1
Parents: 8e27c14
Author: Steve Rowe <sa...@apache.org>
Authored: Mon Feb 1 09:15:17 2016 -0500
Committer: Steve Rowe <sa...@apache.org>
Committed: Mon Feb 1 09:15:17 2016 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +
.../apache/solr/schema/ManagedIndexSchema.java | 4 +-
.../solr/rest/schema/TestBulkSchemaAPI.java | 83 ++++++++++++++++++++
3 files changed, 87 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/064c0ac0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 9b08555..447761f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -434,6 +434,8 @@ Bug Fixes
* SOLR-8605: Regular expression queries starting with escaped forward slash caused
an exception. (Scott Blum, yonik)
+* SOLR-8607: The Schema API refuses to add new fields that match existing dynamic fields.
+ (Jan Høydahl, Steve Rowe)
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/064c0ac0/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index e114031..9db41cd 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -395,7 +395,7 @@ public final class ManagedIndexSchema extends IndexSchema {
newSchema = shallowCopy(true);
for (SchemaField newField : newFields) {
- if (null != newSchema.getFieldOrNull(newField.getName())) {
+ if (null != newSchema.fields.get(newField.getName())) {
String msg = "Field '" + newField.getName() + "' already exists.";
throw new FieldExistsException(ErrorCode.BAD_REQUEST, msg);
}
@@ -1195,7 +1195,7 @@ public final class ManagedIndexSchema extends IndexSchema {
String msg = "Can't add dynamic field '" + fieldName + "'.";
throw new SolrException(ErrorCode.BAD_REQUEST, msg);
}
- SchemaField existingFieldWithTheSameName = getFieldOrNull(fieldName);
+ SchemaField existingFieldWithTheSameName = fields.get(fieldName);
if (null != existingFieldWithTheSameName) {
String msg = "Field '" + fieldName + "' already exists.";
throw new SolrException(ErrorCode.BAD_REQUEST, msg);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/064c0ac0/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
index ca6f1fc..bd05015 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
@@ -164,6 +164,89 @@ public class TestBulkSchemaAPI extends RestTestBase {
assertEquals("5.0.0", String.valueOf(analyzer.get("luceneMatchVersion")));
}
+ public void testAddFieldMatchingExistingDynamicField() throws Exception {
+ RestTestHarness harness = restTestHarness;
+
+ String newFieldName = "attr_non_dynamic";
+
+ Map map = getObj(harness, newFieldName, "fields");
+ assertNull("Field '" + newFieldName + "' already exists in the schema", map);
+
+ map = getObj(harness, "attr_*", "dynamicFields");
+ assertNotNull("'attr_*' dynamic field does not exist in the schema", map);
+
+ map = getObj(harness, "boolean", "fieldTypes");
+ assertNotNull("'boolean' field type does not exist in the schema", map);
+
+ String payload = "{\n" +
+ " 'add-field' : {\n" +
+ " 'name':'" + newFieldName + "',\n" +
+ " 'type':'boolean',\n" +
+ " 'stored':true,\n" +
+ " 'indexed':true\n" +
+ " }\n" +
+ " }";
+
+ String response = harness.post("/schema?wt=json", json(payload));
+
+ map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+ assertNull(response, map.get("errors"));
+
+ map = getObj(harness, newFieldName, "fields");
+ assertNotNull("Field '" + newFieldName + "' is not in the schema", map);
+ }
+
+ public void testAddFieldWithExistingCatchallDynamicField() throws Exception {
+ RestTestHarness harness = restTestHarness;
+
+ String newFieldName = "NewField1";
+
+ Map map = getObj(harness, newFieldName, "fields");
+ assertNull("Field '" + newFieldName + "' already exists in the schema", map);
+
+ map = getObj(harness, "*", "dynamicFields");
+ assertNull("'*' dynamic field already exists in the schema", map);
+
+ map = getObj(harness, "string", "fieldTypes");
+ assertNotNull("'boolean' field type does not exist in the schema", map);
+
+ map = getObj(harness, "boolean", "fieldTypes");
+ assertNotNull("'boolean' field type does not exist in the schema", map);
+
+ String payload = "{\n" +
+ " 'add-dynamic-field' : {\n" +
+ " 'name':'*',\n" +
+ " 'type':'string',\n" +
+ " 'stored':true,\n" +
+ " 'indexed':true\n" +
+ " }\n" +
+ "}";
+
+ String response = harness.post("/schema?wt=json", json(payload));
+
+ map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+ assertNull(response, map.get("errors"));
+
+ map = getObj(harness, "*", "dynamicFields");
+ assertNotNull("Dynamic field '*' is not in the schema", map);
+
+ payload = "{\n" +
+ " 'add-field' : {\n" +
+ " 'name':'" + newFieldName + "',\n" +
+ " 'type':'boolean',\n" +
+ " 'stored':true,\n" +
+ " 'indexed':true\n" +
+ " }\n" +
+ " }";
+
+ response = harness.post("/schema?wt=json", json(payload));
+
+ map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+ assertNull(response, map.get("errors"));
+
+ map = getObj(harness, newFieldName, "fields");
+ assertNotNull("Field '" + newFieldName + "' is not in the schema", map);
+ }
public void testMultipleCommands() throws Exception{
RestTestHarness harness = restTestHarness;
[05/21] lucene-solr git commit: Use 2d points too in some of these
tests (1d has optimized merge, for example)
Posted by cp...@apache.org.
Use 2d points too in some of these tests (1d has optimized merge, for example)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/07c9b2bd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/07c9b2bd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/07c9b2bd
Branch: refs/heads/master-solr-8621
Commit: 07c9b2bdad1c072678763993c0ba2ed3bba810bc
Parents: e6db8ba
Author: Robert Muir <rm...@apache.org>
Authored: Fri Jan 29 11:43:39 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Fri Jan 29 11:43:39 2016 -0500
----------------------------------------------------------------------
.../core/src/test/org/apache/lucene/index/TestAddIndexes.java | 6 ++++++
.../src/test/org/apache/lucene/index/TestAtomicUpdate.java | 1 +
.../test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java | 1 +
.../org/apache/lucene/index/TestIndexWriterExceptions2.java | 1 +
.../org/apache/lucene/index/TestIndexWriterOnDiskFull.java | 2 ++
.../test/org/apache/lucene/index/TestIndexWriterOnVMError.java | 1 +
6 files changed, 12 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
index ca6808f..778a7eb 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
@@ -172,6 +172,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
@@ -207,6 +208,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
@@ -245,6 +247,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
@@ -519,6 +522,7 @@ public class TestAddIndexes extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
@@ -529,6 +533,7 @@ public class TestAddIndexes extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("content", "bbb", Field.Store.NO));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
@@ -1014,6 +1019,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(newTextField("id", "" + (docStart + i), Field.Store.YES));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java b/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
index 1eba37a..e801bcc 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
@@ -81,6 +81,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
d.add(new TextField("contents", English.intToEnglish(i+10*count), Field.Store.NO));
d.add(new IntPoint("doc", i));
+ d.add(new IntPoint("doc2d", i, i));
writer.updateDocument(new Term("id", Integer.toString(i)), d);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java
index 0686048..8077545 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecHoldsOpenFiles.java
@@ -38,6 +38,7 @@ public class TestCodecHoldsOpenFiles extends LuceneTestCase {
Document doc = new Document();
doc.add(newField("foo", "bar", TextField.TYPE_NOT_STORED));
doc.add(new IntPoint("doc", i));
+ doc.add(new IntPoint("doc2d", i, i));
doc.add(new NumericDocValuesField("dv", i));
w.addDocument(doc);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java
index 649170a..4c4c496 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java
@@ -127,6 +127,7 @@ public class TestIndexWriterExceptions2 extends LuceneTestCase {
ft.setStoreTermVectors(true);
doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
doc.add(new IntPoint("point", random().nextInt()));
+ doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
if (random().nextInt(10) > 0) {
// single doc
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
index 8163f8e..f75230c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
@@ -574,6 +574,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(new NumericDocValuesField("numericdv", 1));
doc.add(new IntPoint("point", 1));
+ doc.add(new IntPoint("point2d", 1, 1));
writer.addDocument(doc);
}
@@ -583,6 +584,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
doc.add(newTextField("id", "" + index, Field.Store.NO));
doc.add(new NumericDocValuesField("numericdv", 1));
doc.add(new IntPoint("point", 1));
+ doc.add(new IntPoint("point2d", 1, 1));
writer.addDocument(doc);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/07c9b2bd/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnVMError.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnVMError.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnVMError.java
index 6c62df2..91da346 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnVMError.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnVMError.java
@@ -126,6 +126,7 @@ public class TestIndexWriterOnVMError extends LuceneTestCase {
ft.setStoreTermVectors(true);
doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
doc.add(new IntPoint("point", random().nextInt()));
+ doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
if (random().nextInt(10) > 0) {
// single doc
[02/21] lucene-solr git commit: revert
Posted by cp...@apache.org.
revert
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3141c697
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3141c697
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3141c697
Branch: refs/heads/master-solr-8621
Commit: 3141c697e5f85aeabf63513c7c510cd3862bbc81
Parents: 27c28b5
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jan 29 09:18:31 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jan 29 09:18:31 2016 -0500
----------------------------------------------------------------------
.../java/org/apache/lucene/index/BasePointFormatTestCase.java | 6 ------
1 file changed, 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3141c697/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
index 20bdfb5..3b19530 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointFormatTestCase.java
@@ -919,10 +919,4 @@ public abstract class BasePointFormatTestCase extends BaseIndexFileFormatTestCas
//dir = FSDirectory.open(createTempDir());
return dir;
}
-
- @Override
- public void testMergeStability() {
- // suppress this test from base class: merges for BKD trees are not stable because the tree created by merge will have a different
- // structure than the tree created by adding points separately
- }
}
[08/21] lucene-solr git commit: SOLR-8618 refactored a couple of
methods out as protected
Posted by cp...@apache.org.
SOLR-8618 refactored a couple of methods out as protected
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/13c9912b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/13c9912b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/13c9912b
Branch: refs/heads/master-solr-8621
Commit: 13c9912b3c4698595db8d07fcbc09fe062ee5404
Parents: 105c6df
Author: Noble Paul <no...@apache.org>
Authored: Sun Jan 31 18:44:39 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Sun Jan 31 18:44:39 2016 +0530
----------------------------------------------------------------------
.../solr/handler/dataimport/JdbcDataSource.java | 70 +++++++++++++-------
1 file changed, 46 insertions(+), 24 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/13c9912b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
index 790d502..d485651 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
@@ -294,47 +294,65 @@ public class JdbcDataSource extends
return colNames;
}
- private class ResultSetIterator {
- ResultSet resultSet;
+ protected class ResultSetIterator {
+ private ResultSet resultSet;
- Statement stmt = null;
+ private Statement stmt = null;
- List<String> colNames;
-
- Iterator<Map<String, Object>> rSetIterator;
+
+ private Iterator<Map<String, Object>> rSetIterator;
public ResultSetIterator(String query) {
+ final List<String> colNames;
try {
Connection c = getConnection();
- stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
- stmt.setFetchSize(batchSize);
- stmt.setMaxRows(maxRows);
+ stmt = createStatement(c);
LOG.debug("Executing SQL: " + query);
long start = System.nanoTime();
- if (stmt.execute(query)) {
- resultSet = stmt.getResultSet();
- }
+ resultSet = executeStatement(stmt, query);
LOG.trace("Time taken for sql :"
+ TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
colNames = readFieldNames(resultSet.getMetaData());
} catch (Exception e) {
wrapAndThrow(SEVERE, e, "Unable to execute query: " + query);
+ return;
}
if (resultSet == null) {
rSetIterator = new ArrayList<Map<String, Object>>().iterator();
return;
}
- rSetIterator = new Iterator<Map<String, Object>>() {
+ rSetIterator = createIterator(stmt, resultSet, convertType, colNames, fieldNameVsType);
+ }
+
+
+ protected Statement createStatement(Connection c) throws SQLException {
+ Statement statement = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+ statement.setFetchSize(batchSize);
+ statement.setMaxRows(maxRows);
+ return statement;
+ }
+
+ protected ResultSet executeStatement(Statement statement, String query) throws SQLException {
+ if (statement.execute(query)) {
+ return statement.getResultSet();
+ }
+ return null;
+ }
+
+
+ protected Iterator<Map<String,Object>> createIterator(Statement stmt, ResultSet resultSet, boolean convertType,
+ List<String> colNames, Map<String,Integer> fieldNameVsType) {
+ return new Iterator<Map<String,Object>>() {
@Override
public boolean hasNext() {
- return hasnext();
+ return hasnext(resultSet, stmt);
}
@Override
- public Map<String, Object> next() {
- return getARow();
+ public Map<String,Object> next() {
+ return getARow(resultSet, convertType, colNames, fieldNameVsType);
}
@Override
@@ -342,12 +360,11 @@ public class JdbcDataSource extends
}
};
}
+
+
- private Iterator<Map<String, Object>> getIterator() {
- return rSetIterator;
- }
-
- private Map<String, Object> getARow() {
+ protected Map<String,Object> getARow(ResultSet resultSet, boolean convertType, List<String> colNames,
+ Map<String,Integer> fieldNameVsType) {
if (resultSet == null)
return null;
Map<String, Object> result = new HashMap<>();
@@ -402,7 +419,7 @@ public class JdbcDataSource extends
return result;
}
- private boolean hasnext() {
+ protected boolean hasnext(ResultSet resultSet, Statement stmt) {
if (resultSet == null)
return false;
try {
@@ -419,7 +436,7 @@ public class JdbcDataSource extends
}
}
- private void close() {
+ protected void close() {
try {
if (resultSet != null)
resultSet.close();
@@ -432,9 +449,14 @@ public class JdbcDataSource extends
stmt = null;
}
}
+
+ protected final Iterator<Map<String,Object>> getIterator() {
+ return rSetIterator;
+ }
+
}
- Connection getConnection() throws Exception {
+ protected Connection getConnection() throws Exception {
long currTime = System.nanoTime();
if (currTime - connLastUsed > CONN_TIME_OUT) {
synchronized (this) {
[10/21] lucene-solr git commit: SOLR-8285: Ensure the /export handler
works with NULL field values
Posted by cp...@apache.org.
SOLR-8285: Ensure the /export handler works with NULL field values
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e20820a0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e20820a0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e20820a0
Branch: refs/heads/master-solr-8621
Commit: e20820a04d1aff5ccde64b7183308454bda62fef
Parents: a4d1586
Author: jbernste <jb...@apache.org>
Authored: Sun Jan 31 13:18:48 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Sun Jan 31 13:21:10 2016 -0500
----------------------------------------------------------------------
.../solr/response/SortingResponseWriter.java | 100 +++++++++++++------
.../response/TestSortingResponseWriter.java | 5 +-
.../solrj/io/stream/StreamExpressionTest.java | 63 ++++++++++++
3 files changed, 135 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e20820a0/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java b/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
index 4b9f89f..6b6d36c 100644
--- a/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
@@ -22,7 +22,9 @@ import java.io.PrintWriter;
import java.io.Writer;
import java.lang.invoke.MethodHandles;
import java.util.List;
+import java.util.ArrayList;
+import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
@@ -239,13 +241,11 @@ public class SortingResponseWriter implements QueryResponseWriter {
FixedBitSet set = sets[ord];
set.clear(sortDoc.docId);
LeafReaderContext context = leaves.get(ord);
- boolean needsComma = false;
+ int fieldIndex = 0;
for(FieldWriter fieldWriter : fieldWriters) {
- if(needsComma) {
- out.write(',');
+ if(fieldWriter.write(sortDoc.docId, context.reader(), out, fieldIndex)){
+ ++fieldIndex;
}
- fieldWriter.write(sortDoc.docId, context.reader(), out);
- needsComma = true;
}
}
@@ -827,7 +827,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
}
public void setNextReader(LeafReaderContext context) throws IOException {
- this.vals = context.reader().getNumericDocValues(field);
+ this.vals = DocValues.getNumeric(context.reader(), field);
}
public void setCurrentValue(int docId) {
@@ -905,7 +905,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
}
public void setNextReader(LeafReaderContext context) throws IOException {
- this.vals = context.reader().getNumericDocValues(field);
+ this.vals = DocValues.getNumeric(context.reader(), field);
}
public void setCurrentValue(int docId) {
@@ -984,7 +984,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
}
public void setNextReader(LeafReaderContext context) throws IOException {
- this.vals = context.reader().getNumericDocValues(field);
+ this.vals = DocValues.getNumeric(context.reader(), field);
}
public void setCurrentValue(int docId) {
@@ -1061,7 +1061,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
}
public void setNextReader(LeafReaderContext context) throws IOException {
- this.vals = context.reader().getNumericDocValues(field);
+ this.vals = DocValues.getNumeric(context.reader(), field);
}
public void setCurrentValue(int docId) {
@@ -1193,7 +1193,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
}
protected abstract class FieldWriter {
- public abstract void write(int docId, LeafReader reader, Writer out) throws IOException;
+ public abstract boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException;
}
class IntFieldWriter extends FieldWriter {
@@ -1203,14 +1203,18 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.field = field;
}
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- NumericDocValues vals = reader.getNumericDocValues(this.field);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ NumericDocValues vals = DocValues.getNumeric(reader, this.field);
int val = (int)vals.get(docId);
- out.write('"');
- out.write(this.field);
- out.write('"');
- out.write(':');
- out.write(Integer.toString(val));
+ if(fieldIndex>0) {
+ out.write(',');
+ }
+ out.write('"');
+ out.write(this.field);
+ out.write('"');
+ out.write(':');
+ out.write(Integer.toString(val));
+ return true;
}
}
@@ -1225,18 +1229,30 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.fieldType = fieldType;
this.numeric = numeric;
}
-
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- SortedSetDocValues vals = reader.getSortedSetDocValues(this.field);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ SortedSetDocValues vals = DocValues.getSortedSet(reader, this.field);
vals.setDocument(docId);
+ List<Long> ords = new ArrayList();
+ long o = -1;
+ while((o = vals.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+ ords.add(o);
+ }
+
+ if(ords.size()== 0) {
+ return false;
+ }
+
+
+ if(fieldIndex>0) {
+ out.write(',');
+ }
out.write('"');
out.write(this.field);
out.write('"');
out.write(':');
out.write('[');
int v = 0;
- long ord = -1;
- while((ord = vals.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+ for(long ord : ords) {
BytesRef ref = vals.lookupOrd(ord);
fieldType.indexedToReadable(ref, cref);
if(v > 0) {
@@ -1255,6 +1271,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
++v;
}
out.write("]");
+ return true;
}
}
@@ -1265,14 +1282,18 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.field = field;
}
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- NumericDocValues vals = reader.getNumericDocValues(this.field);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ NumericDocValues vals = DocValues.getNumeric(reader, this.field);
long val = vals.get(docId);
+ if(fieldIndex > 0) {
+ out.write(',');
+ }
out.write('"');
out.write(this.field);
out.write('"');
out.write(':');
out.write(Long.toString(val));
+ return true;
}
}
@@ -1283,14 +1304,18 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.field = field;
}
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- NumericDocValues vals = reader.getNumericDocValues(this.field);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ NumericDocValues vals = DocValues.getNumeric(reader, this.field);
int val = (int)vals.get(docId);
+ if(fieldIndex > 0) {
+ out.write(',');
+ }
out.write('"');
out.write(this.field);
out.write('"');
out.write(':');
out.write(Float.toString(Float.intBitsToFloat(val)));
+ return true;
}
}
@@ -1301,14 +1326,18 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.field = field;
}
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- NumericDocValues vals = reader.getNumericDocValues(this.field);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+ if(fieldIndex > 0) {
+ out.write(',');
+ }
long val = vals.get(docId);
out.write('"');
out.write(this.field);
out.write('"');
out.write(':');
out.write(Double.toString(Double.longBitsToDouble(val)));
+ return true;
}
}
@@ -1322,10 +1351,18 @@ public class SortingResponseWriter implements QueryResponseWriter {
this.fieldType = fieldType;
}
- public void write(int docId, LeafReader reader, Writer out) throws IOException {
- SortedDocValues vals = reader.getSortedDocValues(this.field);
- BytesRef ref = vals.get(docId);
+ public boolean write(int docId, LeafReader reader, Writer out, int fieldIndex) throws IOException {
+ SortedDocValues vals = DocValues.getSorted(reader, this.field);
+ int ord = vals.getOrd(docId);
+ if(ord == -1) {
+ return false;
+ }
+
+ BytesRef ref = vals.lookupOrd(ord);
fieldType.indexedToReadable(ref, cref);
+ if(fieldIndex > 0) {
+ out.write(',');
+ }
out.write('"');
out.write(this.field);
out.write('"');
@@ -1333,6 +1370,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
out.write('"');
writeStr(cref.toString(), out);
out.write('"');
+ return true;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e20820a0/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
index 0e7f9b1..bb432ec 100644
--- a/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
+++ b/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
@@ -114,7 +114,8 @@ public class TestSortingResponseWriter extends SolrTestCaseJ4 {
//Test null value string:
s = h.query(req("q", "id:7", "qt", "/export", "fl", "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc"));
- assertEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":7,\"stringdv\":\"\",\"longdv\":323223232323,\"doubledv\":2344.345}]}}");
+
+ assertEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":7,\"longdv\":323223232323,\"doubledv\":2344.345}]}}");
//Test multiValue docValues output
s = h.query(req("q", "id:1", "qt", "/export", "fl", "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", "sort", "intdv asc"));
@@ -122,7 +123,7 @@ public class TestSortingResponseWriter extends SolrTestCaseJ4 {
//Test multiValues docValues output with nulls
s = h.query(req("q", "id:7", "qt", "/export", "fl", "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", "sort", "intdv asc"));
- assertEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"intdv_m\":[],\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434],\"stringdv_m\":[]}]}}");
+ assertEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434]}]}}");
//Test single sort param is working
s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "intdv desc"));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e20820a0/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 69ef6e8..33ee767 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -136,6 +136,7 @@ public class StreamExpressionTest extends AbstractFullDistribZkTestBase {
testUniqueStream();
testRollupStream();
testStatsStream();
+ testNulls();
testDaemonStream();
testParallelUniqueStream();
testParallelReducerStream();
@@ -303,6 +304,68 @@ public class StreamExpressionTest extends AbstractFullDistribZkTestBase {
del("*:*");
commit();
}
+
+
+ private void testNulls() throws Exception {
+
+ indexr(id, "0", "a_i", "1", "a_f", "0", "s_multi", "aaa", "s_multi", "bbb", "i_multi", "100", "i_multi", "200");
+ indexr(id, "2", "a_s", "hello2", "a_i", "3", "a_f", "0");
+ indexr(id, "3", "a_s", "hello3", "a_i", "4", "a_f", "3");
+ indexr(id, "4", "a_s", "hello4", "a_f", "4");
+ indexr(id, "1", "a_s", "hello1", "a_i", "2", "a_f", "1");
+ commit();
+
+ StreamExpression expression;
+ TupleStream stream;
+ List<Tuple> tuples;
+ Tuple tuple;
+ StreamFactory factory = new StreamFactory()
+ .withCollectionZkHost("collection1", zkServer.getZkAddress())
+ .withFunctionName("search", CloudSolrStream.class);
+ // Basic test
+ expression = StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_i asc\")");
+ stream = new CloudSolrStream(expression, factory);
+ tuples = getTuples(stream);
+
+ assert(tuples.size() == 5);
+ assertOrder(tuples, 4, 0, 1, 2, 3);
+
+ tuple = tuples.get(0);
+ assertTrue("hello4".equals(tuple.getString("a_s")));
+ assertNull(tuple.get("s_multi"));
+ assertNull(tuple.get("i_multi"));
+ assertEquals(0L, (long)tuple.getLong("a_i"));
+
+
+ tuple = tuples.get(1);
+ assertNull(tuple.get("a_s"));
+ List<String> strings = tuple.getStrings("s_multi");
+ assertNotNull(strings);
+ assertEquals("aaa", strings.get(0));
+ assertEquals("bbb", strings.get(1));
+ List<Long> longs = tuple.getLongs("i_multi");
+ assertNotNull(longs);
+
+ //test sort (asc) with null string field. Null should sort to the top.
+ expression = StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s asc\")");
+ stream = new CloudSolrStream(expression, factory);
+ tuples = getTuples(stream);
+
+ assert(tuples.size() == 5);
+ assertOrder(tuples, 0, 1, 2, 3, 4);
+
+ //test sort(desc) with null string field. Null should sort to the bottom.
+ expression = StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s desc\")");
+ stream = new CloudSolrStream(expression, factory);
+ tuples = getTuples(stream);
+
+ assert(tuples.size() == 5);
+ assertOrder(tuples, 4, 3, 2, 1, 0);
+
+ del("*:*");
+ commit();
+ }
+
private void testMergeStream() throws Exception {
[15/21] lucene-solr git commit: SOLR-8600: add & use
ReRankQParserPlugin parameter [default] constants,
changed ReRankQuery.toString to use StringBuilder.
Posted by cp...@apache.org.
SOLR-8600: add & use ReRankQParserPlugin parameter [default] constants, changed ReRankQuery.toString to use StringBuilder.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8e27c14a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8e27c14a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8e27c14a
Branch: refs/heads/master-solr-8621
Commit: 8e27c14aeda817df4e6a6394595104a47f6f8fac
Parents: 15fed60
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Feb 1 12:00:39 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Feb 1 12:00:39 2016 +0000
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../apache/solr/search/ReRankQParserPlugin.java | 27 ++++++---
.../apache/solr/search/QueryEqualityTest.java | 13 ++--
.../solr/search/TestReRankQParserPlugin.java | 63 ++++++++++++--------
4 files changed, 67 insertions(+), 39 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e27c14a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index adfc8d7..9b08555 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -585,6 +585,9 @@ Other Changes
* SOLR-7968: Make QueryComponent more extensible. (Markus Jelsma via David Smiley)
+* SOLR-8600: add & use ReRankQParserPlugin parameter [default] constants,
+ changed ReRankQuery.toString to use StringBuilder. (Christine Poerschke)
+
================== 5.4.1 ==================
Bug Fixes
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e27c14a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index 2b23d53..21cca3e 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -64,6 +64,14 @@ public class ReRankQParserPlugin extends QParserPlugin {
public static final String NAME = "rerank";
private static Query defaultQuery = new MatchAllDocsQuery();
+ public static final String RERANK_QUERY = "reRankQuery";
+
+ public static final String RERANK_DOCS = "reRankDocs";
+ public static final int RERANK_DOCS_DEFAULT = 200;
+
+ public static final String RERANK_WEIGHT = "reRankWeight";
+ public static final double RERANK_WEIGHT_DEFAULT = 2.0d;
+
public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
return new ReRankQParser(query, localParams, params, req);
}
@@ -75,17 +83,17 @@ public class ReRankQParserPlugin extends QParserPlugin {
}
public Query parse() throws SyntaxError {
- String reRankQueryString = localParams.get("reRankQuery");
+ String reRankQueryString = localParams.get(RERANK_QUERY);
if (reRankQueryString == null || reRankQueryString.trim().length() == 0) {
- throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "reRankQuery parameter is mandatory");
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, RERANK_QUERY+" parameter is mandatory");
}
QParser reRankParser = QParser.getParser(reRankQueryString, null, req);
Query reRankQuery = reRankParser.parse();
- int reRankDocs = localParams.getInt("reRankDocs", 200);
+ int reRankDocs = localParams.getInt(RERANK_DOCS, RERANK_DOCS_DEFAULT);
reRankDocs = Math.max(1, reRankDocs); //
- double reRankWeight = localParams.getDouble("reRankWeight",2.0d);
+ double reRankWeight = localParams.getDouble(RERANK_WEIGHT, RERANK_WEIGHT_DEFAULT);
int start = params.getInt(CommonParams.START,CommonParams.START_DEFAULT);
int rows = params.getInt(CommonParams.ROWS,CommonParams.ROWS_DEFAULT);
@@ -150,10 +158,13 @@ public class ReRankQParserPlugin extends QParserPlugin {
@Override
public String toString(String s) {
- return "{!rerank mainQuery='"+mainQuery.toString()+
- "' reRankQuery='"+reRankQuery.toString()+
- "' reRankDocs="+reRankDocs+
- " reRankWeight="+reRankWeight+"}";
+ final StringBuilder sb = new StringBuilder(100); // default initialCapacity of 16 won't be enough
+ sb.append("{!").append(NAME);
+ sb.append(" mainQuery='").append(mainQuery.toString()).append("' ");
+ sb.append(RERANK_QUERY).append("='").append(reRankQuery.toString()).append("' ");
+ sb.append(RERANK_DOCS).append('=').append(reRankDocs).append(' ');
+ sb.append(RERANK_WEIGHT).append('=').append(reRankWeight).append('}');
+ return sb.toString();
}
public Query rewrite(IndexReader reader) throws IOException {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e27c14a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
index 75ce44c..08fa329 100644
--- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
+++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
@@ -124,6 +124,7 @@ public class QueryEqualityTest extends SolrTestCaseJ4 {
}
public void testReRankQuery() throws Exception {
+ final String defType = ReRankQParserPlugin.NAME;
SolrQueryRequest req = req("q", "*:*",
"rqq", "{!edismax}hello",
"rdocs", "20",
@@ -131,9 +132,9 @@ public class QueryEqualityTest extends SolrTestCaseJ4 {
"rows", "10",
"start", "0");
try {
- assertQueryEquals("rerank", req,
- "{!rerank reRankQuery=$rqq reRankDocs=$rdocs reRankWeight=$rweight}",
- "{!rerank reRankQuery=$rqq reRankDocs=20 reRankWeight=2}");
+ assertQueryEquals(defType, req,
+ "{!"+defType+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=$rdocs "+ReRankQParserPlugin.RERANK_WEIGHT+"=$rweight}",
+ "{!"+defType+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=20 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
} finally {
req.close();
@@ -147,9 +148,9 @@ public class QueryEqualityTest extends SolrTestCaseJ4 {
"rows", "100",
"start", "50");
try {
- assertQueryEquals("rerank", req,
- "{!rerank mainQuery=$qq reRankQuery=$rqq reRankDocs=$rdocs reRankWeight=$rweight}",
- "{!rerank mainQuery=$qq reRankQuery=$rqq reRankDocs=20 reRankWeight=2}");
+ assertQueryEquals(defType, req,
+ "{!"+defType+" mainQuery=$qq "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=$rdocs "+ReRankQParserPlugin.RERANK_WEIGHT+"=$rweight}",
+ "{!"+defType+" mainQuery=$qq "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=20 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
} finally {
req.close();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e27c14a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
index 40c60c8..9efbad8 100644
--- a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
@@ -45,6 +45,19 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
}
@Test
+ public void testReRankQParserPluginConstants() throws Exception {
+ assertEquals(ReRankQParserPlugin.NAME, "rerank");
+
+ assertEquals(ReRankQParserPlugin.RERANK_QUERY, "reRankQuery");
+
+ assertEquals(ReRankQParserPlugin.RERANK_DOCS, "reRankDocs");
+ assertEquals(ReRankQParserPlugin.RERANK_DOCS_DEFAULT, 200);
+
+ assertEquals(ReRankQParserPlugin.RERANK_WEIGHT, "reRankWeight");
+ assertEquals(ReRankQParserPlugin.RERANK_WEIGHT_DEFAULT, 2.0d, 0.0d);
+ }
+
+ @Test
public void testReRankQueries() throws Exception {
assertU(delQ("*:*"));
@@ -73,7 +86,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
ModifiableSolrParams params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=200}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=200}");
params.add("q", "term_s:YYYY");
params.add("rqq", "{!edismax bf=$bff}*:*");
params.add("bff", "field(test_ti)");
@@ -89,7 +102,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
);
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "{!edismax bq=$bqq2}*:*");
@@ -109,7 +122,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test with sort by score.
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "{!edismax bq=$bqq2}*:*");
@@ -130,7 +143,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test with compound sort.
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "{!edismax bq=$bqq2}*:*");
@@ -152,7 +165,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test with elevation
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=50}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=50}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "{!edismax bq=$bqq2}*:*");
@@ -175,7 +188,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test TermQuery rqq
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -195,7 +208,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test Elevation
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -217,7 +230,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test Elevation swapped
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -240,7 +253,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=4 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=4 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -261,7 +274,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test Elevation with start beyond the rerank docs
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=3 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -278,7 +291,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test Elevation with zero results
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=3 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}nada");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -294,7 +307,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Pass in reRankDocs lower then the length being collected.
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=1 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=1 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -312,7 +325,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
);
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=0 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=0 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -330,7 +343,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
);
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=2 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=2 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:4^1000");
@@ -349,7 +362,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test reRankWeight of 0, reranking will have no effect.
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=0}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=0}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "test_ti:50^1000");
@@ -374,7 +387,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test range query
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
params.add("q", "test_ti:[0 TO 2000]");
params.add("rqq", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("fl", "id,score");
@@ -400,7 +413,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Run same query and see if it was cached. This tests the query result cache hit with rewritten queries
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
params.add("q", "test_ti:[0 TO 2000]");
params.add("rqq", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("fl", "id,score");
@@ -424,7 +437,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test range query embedded in larger query
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}");
// function query for predictible scores (relative to id) independent of similarity
params.add("q", "{!func}id");
// constant score for each clause (unique per doc) for predictible scores independent of similarity
@@ -446,7 +459,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test with start beyond reRankDocs
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=3 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60");
params.add("rqq", "id:1^1000");
params.add("fl", "id,score");
@@ -462,7 +475,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test ReRankDocs > docs returned
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=6 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50");
params.add("rqq", "id:1^1000");
params.add("fl", "id,score");
@@ -477,7 +490,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test with zero results
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=3 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "term_s:NNNN");
params.add("rqq", "id:1^1000");
params.add("fl", "id,score");
@@ -531,7 +544,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
ModifiableSolrParams params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=11 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=11 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110");
params.add("rqq", "test_ti:50^1000");
@@ -546,7 +559,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
//Test Elevation
params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=11 reRankWeight=2}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=11 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}");
params.add("q", "{!edismax bq=$bqq1}*:*");
params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110");
params.add("rqq", "test_ti:50^1000");
@@ -576,14 +589,14 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
ModifiableSolrParams params = new ModifiableSolrParams();
- params.add("rq", "{!rerank reRankQuery=$rqq reRankDocs=200}");
+ params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=200}");
params.add("q", "term_s:YYYY");
params.add("start", "0");
params.add("rows", "2");
try {
h.query(req(params));
- fail("A syntax error should be thrown when reRankQuery parameter is not specified");
+ fail("A syntax error should be thrown when "+ReRankQParserPlugin.RERANK_QUERY+" parameter is not specified");
} catch (SolrException e) {
assertTrue(e.code() == SolrException.ErrorCode.BAD_REQUEST.code);
}
[04/21] lucene-solr git commit: SOLR-8532: GraphQuery doesn't collect
edges at maxDepth level
Posted by cp...@apache.org.
SOLR-8532: GraphQuery doesn't collect edges at maxDepth level
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e6db8ba2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e6db8ba2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e6db8ba2
Branch: refs/heads/master-solr-8621
Commit: e6db8ba2149e9733b7ca4d19a90ff9a36c75df1e
Parents: c403083
Author: yonik <yo...@apache.org>
Authored: Fri Jan 29 10:59:49 2016 -0500
Committer: yonik <yo...@apache.org>
Committed: Fri Jan 29 10:59:49 2016 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../org/apache/solr/search/join/GraphQuery.java | 89 +++++++++++---------
.../solr/search/join/GraphTermsCollector.java | 2 +-
.../apache/solr/search/join/GraphQueryTest.java | 23 +++++
4 files changed, 78 insertions(+), 39 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e6db8ba2/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5ff042f..f5c88a3 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -172,6 +172,9 @@ Optimizations
count. Also includes change to move to the next non-zero term value when selecting a segment
position. (Keith Laban, Steve Bower, Dennis Gove)
+* SOLR-8532: Optimize GraphQuery when maxDepth is set by not collecting edges at the maxDepth level.
+ (Kevin Watters via yonik)
+
Other Changes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e6db8ba2/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
index 5f9bfd2..a31568a 100644
--- a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
@@ -135,8 +135,8 @@ public class GraphQuery extends Query {
SolrIndexSearcher fromSearcher;
private float queryNorm = 1.0F;
private float queryWeight = 1.0F;
- int frontierSize = 0;
- public int currentDepth = 0;
+ private int frontierSize = 0;
+ private int currentDepth = -1;
private Filter filter;
private DocSet resultSet;
@@ -177,69 +177,82 @@ public class GraphQuery extends Query {
* @throws IOException - if a sub search fails... maybe other cases too! :)
*/
private DocSet getDocSet() throws IOException {
- DocSet fromSet = null;
- FixedBitSet seedResultBits = null;
// Size that the bit set needs to be.
int capacity = fromSearcher.getRawReader().maxDoc();
// The bit set to contain the results that match the query.
FixedBitSet resultBits = new FixedBitSet(capacity);
- // The measure of how deep in the graph we have gone.
- currentDepth = 0;
+ // this holds the result at each level
+ BitDocSet fromSet = null;
+ // the root docs if we return root is false
+ FixedBitSet rootBits = null;
// the initial query for the frontier for the first query
Query frontierQuery = q;
// Find all documents in this graph that are leaf nodes to speed traversal
- // TODO: speed this up in the future with HAS_FIELD type queries
- BooleanQuery.Builder leafNodeQuery = new BooleanQuery.Builder();
- WildcardQuery edgeQuery = new WildcardQuery(new Term(toField, "*"));
- leafNodeQuery.add(edgeQuery, Occur.MUST_NOT);
- DocSet leafNodes = fromSearcher.getDocSet(leafNodeQuery.build());
+ DocSet leafNodes = resolveLeafNodes(toField);
// Start the breadth first graph traversal.
+
do {
- // Create the graph result collector for this level
- GraphTermsCollector graphResultCollector = new GraphTermsCollector(toField,capacity, resultBits, leafNodes);
- // traverse the level!
- fromSearcher.search(frontierQuery, graphResultCollector);
- // All edge ids on the frontier.
- BytesRefHash collectorTerms = graphResultCollector.getCollectorTerms();
- frontierSize = collectorTerms.size();
- // The resulting doc set from the frontier.
- fromSet = graphResultCollector.getDocSet();
- if (seedResultBits == null) {
- // grab a copy of the seed bits (these are the "rootNodes")
- seedResultBits = ((BitDocSet)fromSet).getBits().clone();
+ // Increment how far we have gone in the frontier.
+ currentDepth++;
+ // if we are at the max level we don't need the graph terms collector.
+ // TODO validate that the join case works properly.
+ if (maxDepth != -1 && currentDepth >= maxDepth) {
+ // if we've reached the max depth, don't worry about collecting edges.
+ fromSet = fromSearcher.getDocSetBits(frontierQuery);
+ // explicitly the frontier size is zero now so we can break
+ frontierSize = 0;
+ } else {
+ // when we're not at the max depth level, we need to collect edges
+ // Create the graph result collector for this level
+ GraphTermsCollector graphResultCollector = new GraphTermsCollector(toField,capacity, resultBits, leafNodes);
+ fromSearcher.search(frontierQuery, graphResultCollector);
+ fromSet = graphResultCollector.getDocSet();
+ // All edge ids on the frontier.
+ BytesRefHash collectorTerms = graphResultCollector.getCollectorTerms();
+ frontierSize = collectorTerms.size();
+ // The resulting doc set from the frontier.
+ FrontierQuery fq = buildFrontierQuery(collectorTerms, frontierSize);
+ if (fq == null) {
+ // in case we get null back, make sure we know we're done at this level.
+ frontierSize = 0;
+ } else {
+ frontierQuery = fq.getQuery();
+ frontierSize = fq.getFrontierSize();
+ }
}
- Integer fs = new Integer(frontierSize);
- FrontierQuery fq = buildFrontierQuery(collectorTerms, fs);
- if (fq == null) {
- // in case we get null back, make sure we know we're done at this level.
- fq = new FrontierQuery(null, 0);
+ if (currentDepth == 0 && !returnRoot) {
+ // grab a copy of the root bits but only if we need it.
+ rootBits = fromSet.getBits();
}
- frontierQuery = fq.getQuery();
- frontierSize = fq.getFrontierSize();
// Add the bits from this level to the result set.
- resultBits.or(((BitDocSet)fromSet).getBits());
- // Increment how far we have gone in the frontier.
- currentDepth++;
- // Break out if we have reached our max depth
- if (currentDepth >= maxDepth && maxDepth != -1) {
+ resultBits.or(fromSet.getBits());
+ // test if we discovered any new edges, if not , we're done.
+ if ((maxDepth != -1 && currentDepth >= maxDepth)) {
break;
}
- // test if we discovered any new edges, if not , we're done.
} while (frontierSize > 0);
// helper bit set operations on the final result set
if (!returnRoot) {
- resultBits.andNot(seedResultBits);
+ resultBits.andNot(rootBits);
}
+ // this is the final resulting filter.
BitDocSet resultSet = new BitDocSet(resultBits);
// If we only want to return leaf nodes do that here.
if (onlyLeafNodes) {
return resultSet.intersection(leafNodes);
} else {
- // create a doc set off the bits that we found.
return resultSet;
}
}
+ private DocSet resolveLeafNodes(String field) throws IOException {
+ BooleanQuery.Builder leafNodeQuery = new BooleanQuery.Builder();
+ WildcardQuery edgeQuery = new WildcardQuery(new Term(field, "*"));
+ leafNodeQuery.add(edgeQuery, Occur.MUST_NOT);
+ DocSet leafNodes = fromSearcher.getDocSet(leafNodeQuery.build());
+ return leafNodes;
+ }
+
/** Build an automaton to represent the frontier query */
private Automaton buildAutomaton(BytesRefHash termBytesHash) {
// need top pass a sorted set of terms to the autn builder (maybe a better way to avoid this?)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e6db8ba2/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java b/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
index 6af3694..389721e 100644
--- a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
+++ b/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
@@ -108,7 +108,7 @@ class GraphTermsCollector extends SimpleCollector implements Collector {
numHits++;
}
- public DocSet getDocSet() {
+ public BitDocSet getDocSet() {
if (bits == null) {
// TODO: this shouldn't happen
bits = new FixedBitSet(maxDoc);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e6db8ba2/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java b/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java
index 4385dcc..1f5de65 100644
--- a/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java
+++ b/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java
@@ -77,6 +77,29 @@ public class GraphQueryTest extends SolrTestCaseJ4 {
qr = createRequest(g4Query);
assertQ(qr,"//*[@numFound='2']");
+ String g5Query = "{!graph from=\"node_id\" to=\"edge_id\" returnRoot=\"true\" returnOnlyLeaf=\"false\" maxDepth=0}id:doc_8";
+ qr = createRequest(g5Query);
+ assertQ(qr,"//*[@numFound='1']");
+
+ String g6Query = "{!graph from=\"node_id\" to=\"edge_id\" returnRoot=\"true\" returnOnlyLeaf=\"false\" maxDepth=1}id:doc_8";
+ qr = createRequest(g6Query);
+ assertQ(qr,"//*[@numFound='3']");
+
+ String g7Query = "{!graph from=\"node_id\" to=\"edge_id\" returnRoot=\"false\" returnOnlyLeaf=\"false\" maxDepth=1}id:doc_8";
+ qr = createRequest(g7Query);
+ assertQ(qr,"//*[@numFound='2']");
+
+ String g8Query = "{!graph from=\"node_id\" to=\"edge_id\" returnRoot=\"false\" returnOnlyLeaf=\"true\" maxDepth=2}id:doc_8";
+ qr = createRequest(g8Query);
+ assertQ(qr,"//*[@numFound='1']");
+
+ String g9Query = "{!graph from=\"node_id\" to=\"edge_id\" maxDepth=1}id:doc_1";
+ qr = createRequest(g9Query);
+ assertQ(qr,"//*[@numFound='2']");
+
+ String g10Query = "{!graph from=\"node_id\" to=\"edge_id\" returnRoot=false maxDepth=1}id:doc_1";
+ qr = createRequest(g10Query);
+ assertQ(qr,"//*[@numFound='1']");
}
private SolrQueryRequest createRequest(String query) {
[17/21] lucene-solr git commit: LUCENE-7005: TieredMergePolicy tweaks
(>= vs. >, @see get vs. set)
Posted by cp...@apache.org.
LUCENE-7005: TieredMergePolicy tweaks (>= vs. >, @see get vs. set)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fce97a6c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fce97a6c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fce97a6c
Branch: refs/heads/master-solr-8621
Commit: fce97a6ca6e14b3d0216957cee1621327c452dfc
Parents: 064c0ac
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Feb 1 17:11:47 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Feb 1 17:11:47 2016 +0000
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +++
.../core/src/java/org/apache/lucene/index/TieredMergePolicy.java | 4 ++--
2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fce97a6c/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index ad22c43..017742d 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -263,6 +263,9 @@ Other
* LUCENE-6988: You can now add IndexableFields directly to a MemoryIndex,
and create a MemoryIndex from a lucene Document. (Alan Woodward)
+* LUCENE-7005: TieredMergePolicy tweaks (>= vs. >, @see get vs. set)
+ (Christine Poerschke)
+
======================= Lucene 5.4.1 =======================
Bug Fixes
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fce97a6c/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
index 205c7c7..d857c5e 100644
--- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
@@ -148,7 +148,7 @@ public class TieredMergePolicy extends MergePolicy {
/** Returns the current maxMergedSegmentMB setting.
*
- * @see #getMaxMergedSegmentMB */
+ * @see #setMaxMergedSegmentMB */
public double getMaxMergedSegmentMB() {
return maxMergedSegmentBytes/1024/1024.;
}
@@ -180,7 +180,7 @@ public class TieredMergePolicy extends MergePolicy {
* Default is 2 MB. */
public TieredMergePolicy setFloorSegmentMB(double v) {
if (v <= 0.0) {
- throw new IllegalArgumentException("floorSegmentMB must be >= 0.0 (got " + v + ")");
+ throw new IllegalArgumentException("floorSegmentMB must be > 0.0 (got " + v + ")");
}
v *= 1024 * 1024;
floorSegmentBytes = v > Long.MAX_VALUE ? Long.MAX_VALUE : (long) v;
[06/21] lucene-solr git commit: catch invalid usage of XXXPoint APIs,
when dimensions are mismatched
Posted by cp...@apache.org.
catch invalid usage of XXXPoint APIs, when dimensions are mismatched
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5d5b082e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5d5b082e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5d5b082e
Branch: refs/heads/master-solr-8621
Commit: 5d5b082e6d68413c274c8259c235c2d56b14a760
Parents: 07c9b2b
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jan 29 15:39:06 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jan 29 15:39:06 2016 -0500
----------------------------------------------------------------------
.../org/apache/lucene/document/DoublePoint.java | 6 ++
.../org/apache/lucene/document/FloatPoint.java | 6 ++
.../org/apache/lucene/document/IntPoint.java | 6 ++
.../org/apache/lucene/document/LongPoint.java | 6 ++
.../apache/lucene/index/TestPointValues.java | 71 ++++++++++++++++++++
5 files changed, 95 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5d5b082e/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
index a7a63e0..a12a0c9 100644
--- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
@@ -42,6 +42,9 @@ public final class DoublePoint extends Field {
/** Change the values of this field */
public void setDoubleValues(double... point) {
+ if (type.pointDimensionCount() != point.length) {
+ throw new IllegalArgumentException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot change to (incoming) " + point.length + " dimensions");
+ }
fieldsData = pack(point);
}
@@ -52,6 +55,9 @@ public final class DoublePoint extends Field {
@Override
public Number numericValue() {
+ if (type.pointDimensionCount() != 1) {
+ throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
+ }
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(bytes.bytes, bytes.offset));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5d5b082e/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
index a023a4a..00766ef 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
@@ -42,6 +42,9 @@ public final class FloatPoint extends Field {
/** Change the values of this field */
public void setFloatValues(float... point) {
+ if (type.pointDimensionCount() != point.length) {
+ throw new IllegalArgumentException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot change to (incoming) " + point.length + " dimensions");
+ }
fieldsData = pack(point);
}
@@ -52,6 +55,9 @@ public final class FloatPoint extends Field {
@Override
public Number numericValue() {
+ if (type.pointDimensionCount() != 1) {
+ throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
+ }
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(bytes.bytes, bytes.offset));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5d5b082e/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
index 28f6a55..ef48706 100644
--- a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
@@ -42,6 +42,9 @@ public final class IntPoint extends Field {
/** Change the values of this field */
public void setIntValues(int... point) {
+ if (type.pointDimensionCount() != point.length) {
+ throw new IllegalArgumentException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot change to (incoming) " + point.length + " dimensions");
+ }
fieldsData = pack(point);
}
@@ -52,6 +55,9 @@ public final class IntPoint extends Field {
@Override
public Number numericValue() {
+ if (type.pointDimensionCount() != 1) {
+ throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
+ }
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.bytesToInt(bytes.bytes, bytes.offset);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5d5b082e/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
index 23fddb2..15810d1 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
@@ -42,6 +42,9 @@ public final class LongPoint extends Field {
/** Change the values of this field */
public void setLongValues(long... point) {
+ if (type.pointDimensionCount() != point.length) {
+ throw new IllegalArgumentException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot change to (incoming) " + point.length + " dimensions");
+ }
fieldsData = pack(point);
}
@@ -52,6 +55,9 @@ public final class LongPoint extends Field {
@Override
public Number numericValue() {
+ if (type.pointDimensionCount() != 1) {
+ throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
+ }
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.bytesToLong(bytes.bytes, bytes.offset);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5d5b082e/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index 738109e..ff5fdf8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -21,8 +21,11 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.BinaryPoint;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@@ -435,4 +438,72 @@ public class TestPointValues extends LuceneTestCase {
w.close();
dir.close();
}
+
+ public void testInvalidIntPointUsage() throws Exception {
+ IntPoint field = new IntPoint("field", 17, 42);
+ try {
+ field.setIntValue(14);
+ fail("did not hit exception");
+ } catch (IllegalArgumentException iae) {
+ // good
+ }
+
+ try {
+ field.numericValue();
+ fail("did not hit exception");
+ } catch (IllegalStateException ise) {
+ // good
+ }
+ }
+
+ public void testInvalidLongPointUsage() throws Exception {
+ LongPoint field = new LongPoint("field", 17, 42);
+ try {
+ field.setLongValue(14);
+ fail("did not hit exception");
+ } catch (IllegalArgumentException iae) {
+ // good
+ }
+
+ try {
+ field.numericValue();
+ fail("did not hit exception");
+ } catch (IllegalStateException ise) {
+ // good
+ }
+ }
+
+ public void testInvalidFloatPointUsage() throws Exception {
+ FloatPoint field = new FloatPoint("field", 17, 42);
+ try {
+ field.setFloatValue(14);
+ fail("did not hit exception");
+ } catch (IllegalArgumentException iae) {
+ // good
+ }
+
+ try {
+ field.numericValue();
+ fail("did not hit exception");
+ } catch (IllegalStateException ise) {
+ // good
+ }
+ }
+
+ public void testInvalidDoublePointUsage() throws Exception {
+ DoublePoint field = new DoublePoint("field", 17, 42);
+ try {
+ field.setDoubleValue(14);
+ fail("did not hit exception");
+ } catch (IllegalArgumentException iae) {
+ // good
+ }
+
+ try {
+ field.numericValue();
+ fail("did not hit exception");
+ } catch (IllegalStateException ise) {
+ // good
+ }
+ }
}
[11/21] lucene-solr git commit: SOLR-8285: Update CHANGES.txt
Posted by cp...@apache.org.
SOLR-8285: Update CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/14a2c16c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/14a2c16c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/14a2c16c
Branch: refs/heads/master-solr-8621
Commit: 14a2c16ca4243e5f26b776639cb03ec5cefe3d2b
Parents: e20820a
Author: jbernste <jb...@apache.org>
Authored: Sun Jan 31 13:31:27 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Sun Jan 31 13:31:27 2016 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14a2c16c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f5c88a3..4c0722e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -142,6 +142,8 @@ New Features
* SOLR-8550: Add asynchronous DaemonStreams to the Streaming API (Joel Bernstein)
+* SOLR-8285: Ensure the /export handler works with NULL field values (Joel Bernstein)
+
Bug Fixes
----------------------
* SOLR-8386: Add field option in the new admin UI schema page loads up even when no schemaFactory has been
[21/21] lucene-solr git commit: Merge branch 'master' into
master-solr-8622
Posted by cp...@apache.org.
Merge branch 'master' into master-solr-8622
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/219f7de9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/219f7de9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/219f7de9
Branch: refs/heads/master-solr-8621
Commit: 219f7de9401ab66eab00647a493d97e0296dfbff
Parents: 14d2b0c c136bd7
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Feb 2 09:51:34 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Feb 2 09:51:34 2016 +0000
----------------------------------------------------------------------
lucene/CHANGES.txt | 7 +
lucene/benchmark/.gitignore | 2 +
.../org/apache/lucene/document/DoublePoint.java | 6 +
.../org/apache/lucene/document/FloatPoint.java | 6 +
.../org/apache/lucene/document/IntPoint.java | 6 +
.../org/apache/lucene/document/LongPoint.java | 6 +
.../apache/lucene/index/MultiPointValues.java | 4 +-
.../apache/lucene/index/TieredMergePolicy.java | 4 +-
.../org/apache/lucene/index/TestAddIndexes.java | 6 +
.../apache/lucene/index/TestAtomicUpdate.java | 1 +
.../lucene/index/TestCodecHoldsOpenFiles.java | 1 +
.../index/TestIndexWriterExceptions2.java | 1 +
.../lucene/index/TestIndexWriterOnDiskFull.java | 2 +
.../lucene/index/TestIndexWriterOnVMError.java | 1 +
.../apache/lucene/index/TestNoMergePolicy.java | 8 +-
.../apache/lucene/index/TestPointValues.java | 71 ++++++++
.../lucene/index/TestSwappedIndexFiles.java | 12 +-
.../index/TestUpgradeIndexMergePolicy.java | 26 +++
.../lucene/index/TestSortingMergePolicy.java | 8 +-
.../index/BaseIndexFileFormatTestCase.java | 5 +
.../lucene/index/BasePointFormatTestCase.java | 7 +
.../org/apache/lucene/util/LuceneTestCase.java | 64 +++++++
solr/CHANGES.txt | 12 ++
.../solr/handler/dataimport/JdbcDataSource.java | 70 +++++---
.../org/apache/solr/handler/SQLHandler.java | 67 ++++++-
.../solr/handler/component/QueryComponent.java | 48 ++---
.../solr/handler/component/ResponseBuilder.java | 4 +
.../apache/solr/handler/component/ShardDoc.java | 166 +----------------
.../component/ShardFieldSortedHitQueue.java | 179 +++++++++++++++++++
.../solr/response/SortingResponseWriter.java | 100 +++++++----
.../apache/solr/schema/ManagedIndexSchema.java | 4 +-
.../apache/solr/search/ReRankQParserPlugin.java | 27 ++-
.../org/apache/solr/search/join/GraphQuery.java | 89 +++++----
.../solr/search/join/GraphTermsCollector.java | 2 +-
.../response/TestSortingResponseWriter.java | 5 +-
.../solr/rest/schema/TestBulkSchemaAPI.java | 83 +++++++++
.../apache/solr/search/QueryEqualityTest.java | 13 +-
.../solr/search/TestReRankQParserPlugin.java | 63 ++++---
.../apache/solr/search/join/GraphQueryTest.java | 23 +++
.../client/solrj/io/sql/ConnectionImpl.java | 30 ++--
.../solrj/io/sql/DatabaseMetaDataImpl.java | 7 +-
.../solr/client/solrj/io/sql/ResultSetImpl.java | 22 ++-
.../solr/client/solrj/io/sql/StatementImpl.java | 29 +--
.../solr/client/solrj/io/sql/JdbcTest.java | 9 +-
.../solrj/io/stream/StreamExpressionTest.java | 63 +++++++
45 files changed, 987 insertions(+), 382 deletions(-)
----------------------------------------------------------------------
[07/21] lucene-solr git commit: also assert PointValues are the same
from LTC.assertReaderEquals
Posted by cp...@apache.org.
also assert PointValues are the same from LTC.assertReaderEquals
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9332b160
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9332b160
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9332b160
Branch: refs/heads/master-solr-8621
Commit: 9332b1602cc0f7312fc22a3d088c549299015691
Parents: 5d5b082
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jan 29 19:09:04 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jan 29 19:09:04 2016 -0500
----------------------------------------------------------------------
.../apache/lucene/index/MultiPointValues.java | 4 +-
.../org/apache/lucene/util/LuceneTestCase.java | 64 ++++++++++++++++++++
2 files changed, 67 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9332b160/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java
index 12282e7..5dd4fcc 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java
@@ -23,7 +23,8 @@ import java.util.List;
import org.apache.lucene.util.StringHelper;
-class MultiPointValues extends PointValues {
+/** Merges multiple {@link PointValues} into a single one. */
+public class MultiPointValues extends PointValues {
private final List<PointValues> subs;
private final List<Integer> docBases;
@@ -33,6 +34,7 @@ class MultiPointValues extends PointValues {
this.docBases = docBases;
}
+ /** Returns a {@link PointValues} merging all point values from the provided reader. */
public static PointValues get(IndexReader r) {
final List<LeafReaderContext> leaves = r.leaves();
final int size = leaves.size();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9332b160/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index aaab030..7175837 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -1888,6 +1888,7 @@ public abstract class LuceneTestCase extends Assert {
assertDocValuesEquals(info, leftReader, rightReader);
assertDeletedDocsEquals(info, leftReader, rightReader);
assertFieldInfosEquals(info, leftReader, rightReader);
+ assertPointsEquals(info, leftReader, rightReader);
}
/**
@@ -2533,6 +2534,69 @@ public abstract class LuceneTestCase extends Assert {
assertEquals(info, left, right);
}
+ // naive silly memory heavy uninversion!! maps docID -> packed values (a Set because a given doc can be multi-valued)
+ private Map<Integer,Set<BytesRef>> uninvert(String fieldName, PointValues points) throws IOException {
+ final Map<Integer,Set<BytesRef>> docValues = new HashMap<>();
+ points.intersect(fieldName, new PointValues.IntersectVisitor() {
+ @Override
+ public void visit(int docID) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void visit(int docID, byte[] packedValue) throws IOException {
+ if (docValues.containsKey(docID) == false) {
+ docValues.put(docID, new HashSet<BytesRef>());
+ }
+ docValues.get(docID).add(new BytesRef(packedValue.clone()));
+ }
+
+ @Override
+ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+ // We pretend our query shape is so hairy that it crosses every single cell:
+ return PointValues.Relation.CELL_CROSSES_QUERY;
+ }
+ });
+ return docValues;
+ }
+
+ public void assertPointsEquals(String info, IndexReader leftReader, IndexReader rightReader) throws IOException {
+ assertPointsEquals(info,
+ MultiFields.getMergedFieldInfos(leftReader),
+ MultiPointValues.get(leftReader),
+ MultiFields.getMergedFieldInfos(rightReader),
+ MultiPointValues.get(rightReader));
+ }
+
+ public void assertPointsEquals(String info, FieldInfos fieldInfos1, PointValues points1, FieldInfos fieldInfos2, PointValues points2) throws IOException {
+ for(FieldInfo fieldInfo1 : fieldInfos1) {
+ if (fieldInfo1.getPointDimensionCount() != 0) {
+ FieldInfo fieldInfo2 = fieldInfos2.fieldInfo(fieldInfo1.name);
+ // same dimension count?
+ assertEquals(info, fieldInfo2.getPointDimensionCount(), fieldInfo2.getPointDimensionCount());
+ // same bytes per dimension?
+ assertEquals(info, fieldInfo2.getPointNumBytes(), fieldInfo2.getPointNumBytes());
+
+ assertEquals(info + " field=" + fieldInfo1.name,
+ uninvert(fieldInfo1.name, points1),
+ uninvert(fieldInfo1.name, points2));
+ }
+ }
+
+ // make sure FieldInfos2 doesn't have any point fields that FieldInfo1 didn't have
+ for(FieldInfo fieldInfo2 : fieldInfos2) {
+ if (fieldInfo2.getPointDimensionCount() != 0) {
+ FieldInfo fieldInfo1 = fieldInfos1.fieldInfo(fieldInfo2.name);
+ // same dimension count?
+ assertEquals(info, fieldInfo2.getPointDimensionCount(), fieldInfo1.getPointDimensionCount());
+ // same bytes per dimension?
+ assertEquals(info, fieldInfo2.getPointNumBytes(), fieldInfo1.getPointNumBytes());
+
+ // we don't need to uninvert and compare here ... we did that in the first loop above
+ }
+ }
+ }
+
/** Returns true if the file exists (can be opened), false
* if it cannot be opened, and (unlike Java's
* File.exists) throws IOException if there's some
[12/21] lucene-solr git commit: SOLR-7968: Make QueryComponent
extensible
Posted by cp...@apache.org.
SOLR-7968: Make QueryComponent extensible
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4cdce3db
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4cdce3db
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4cdce3db
Branch: refs/heads/master-solr-8621
Commit: 4cdce3db77cde2506ac77dc9ced8c1f91ed9800f
Parents: 14a2c16ca
Author: David Smiley <ds...@apache.org>
Authored: Sun Jan 31 22:25:37 2016 -0500
Committer: David Smiley <ds...@apache.org>
Committed: Sun Jan 31 22:25:37 2016 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +
.../solr/handler/component/QueryComponent.java | 48 ++---
.../solr/handler/component/ResponseBuilder.java | 4 +
.../apache/solr/handler/component/ShardDoc.java | 166 +----------------
.../component/ShardFieldSortedHitQueue.java | 179 +++++++++++++++++++
5 files changed, 211 insertions(+), 188 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cdce3db/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4c0722e..adfc8d7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -583,6 +583,8 @@ Other Changes
* SOLR-8597: add default, no-op QParserPlugin.init(NamedList) method (Christine Poerschke)
+* SOLR-7968: Make QueryComponent more extensible. (Markus Jelsma via David Smiley)
+
================== 5.4.1 ==================
Bug Fixes
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cdce3db/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index 2e99a2b..05fe28d 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -235,7 +235,7 @@ public class QueryComponent extends SearchComponent
}
}
- private void prepareGrouping(ResponseBuilder rb) throws IOException {
+ protected void prepareGrouping(ResponseBuilder rb) throws IOException {
SolrQueryRequest req = rb.req;
SolrParams params = req.getParams();
@@ -671,7 +671,7 @@ public class QueryComponent extends SearchComponent
}
}
- private int groupedDistributedProcess(ResponseBuilder rb) {
+ protected int groupedDistributedProcess(ResponseBuilder rb) {
int nextStage = ResponseBuilder.STAGE_DONE;
ShardRequestFactory shardRequestFactory = null;
@@ -705,7 +705,7 @@ public class QueryComponent extends SearchComponent
return nextStage;
}
- private int regularDistributedProcess(ResponseBuilder rb) {
+ protected int regularDistributedProcess(ResponseBuilder rb) {
if (rb.stage < ResponseBuilder.STAGE_PARSE_QUERY)
return ResponseBuilder.STAGE_PARSE_QUERY;
if (rb.stage == ResponseBuilder.STAGE_PARSE_QUERY) {
@@ -734,7 +734,7 @@ public class QueryComponent extends SearchComponent
}
}
- private void handleGroupedResponses(ResponseBuilder rb, ShardRequest sreq) {
+ protected void handleGroupedResponses(ResponseBuilder rb, ShardRequest sreq) {
ShardResponseProcessor responseProcessor = null;
if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_GROUPS) != 0) {
responseProcessor = new SearchGroupShardResponseProcessor();
@@ -749,7 +749,7 @@ public class QueryComponent extends SearchComponent
}
}
- private void handleRegularResponses(ResponseBuilder rb, ShardRequest sreq) {
+ protected void handleRegularResponses(ResponseBuilder rb, ShardRequest sreq) {
if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
mergeIds(rb, sreq);
}
@@ -775,11 +775,11 @@ public class QueryComponent extends SearchComponent
}
}
- private static final EndResultTransformer MAIN_END_RESULT_TRANSFORMER = new MainEndResultTransformer();
- private static final EndResultTransformer SIMPLE_END_RESULT_TRANSFORMER = new SimpleEndResultTransformer();
+ protected static final EndResultTransformer MAIN_END_RESULT_TRANSFORMER = new MainEndResultTransformer();
+ protected static final EndResultTransformer SIMPLE_END_RESULT_TRANSFORMER = new SimpleEndResultTransformer();
@SuppressWarnings("unchecked")
- private void groupedFinishStage(final ResponseBuilder rb) {
+ protected void groupedFinishStage(final ResponseBuilder rb) {
// To have same response as non-distributed request.
GroupingSpecification groupSpec = rb.getGroupingSpec();
if (rb.mergedTopGroups.isEmpty()) {
@@ -814,24 +814,24 @@ public class QueryComponent extends SearchComponent
endResultTransformer.transform(combinedMap, rb, solrDocumentSource);
}
- private void regularFinishStage(ResponseBuilder rb) {
+ protected void regularFinishStage(ResponseBuilder rb) {
// We may not have been able to retrieve all the docs due to an
// index change. Remove any null documents.
- for (Iterator<SolrDocument> iter = rb._responseDocs.iterator(); iter.hasNext();) {
+ for (Iterator<SolrDocument> iter = rb.getResponseDocs().iterator(); iter.hasNext();) {
if (iter.next() == null) {
iter.remove();
- rb._responseDocs.setNumFound(rb._responseDocs.getNumFound()-1);
+ rb.getResponseDocs().setNumFound(rb.getResponseDocs().getNumFound()-1);
}
}
- rb.rsp.addResponse(rb._responseDocs);
+ rb.rsp.addResponse(rb.getResponseDocs());
if (null != rb.getNextCursorMark()) {
rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT,
rb.getNextCursorMark().getSerializedTotem());
}
}
- private void createDistributedStats(ResponseBuilder rb) {
+ protected void createDistributedStats(ResponseBuilder rb) {
StatsCache cache = rb.req.getCore().getStatsCache();
if ( (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES)!=0 || rb.getSortSpec().includesScore()) {
ShardRequest sreq = cache.retrieveStatsRequest(rb);
@@ -841,12 +841,12 @@ public class QueryComponent extends SearchComponent
}
}
- private void updateStats(ResponseBuilder rb, ShardRequest sreq) {
+ protected void updateStats(ResponseBuilder rb, ShardRequest sreq) {
StatsCache cache = rb.req.getCore().getStatsCache();
cache.mergeToGlobalStats(rb.req, sreq.responses);
}
- private void createMainQuery(ResponseBuilder rb) {
+ protected void createMainQuery(ResponseBuilder rb) {
ShardRequest sreq = new ShardRequest();
sreq.purpose = ShardRequest.PURPOSE_GET_TOP_IDS;
@@ -931,13 +931,13 @@ public class QueryComponent extends SearchComponent
rb.addRequest(this, sreq);
}
- private boolean addFL(StringBuilder fl, String field, boolean additionalAdded) {
+ protected boolean addFL(StringBuilder fl, String field, boolean additionalAdded) {
if (additionalAdded) fl.append(",");
fl.append(field);
return true;
}
- private void mergeIds(ResponseBuilder rb, ShardRequest sreq) {
+ protected void mergeIds(ResponseBuilder rb, ShardRequest sreq) {
List<MergeStrategy> mergeStrategies = rb.getMergeStrategies();
if(mergeStrategies != null) {
Collections.sort(mergeStrategies, MergeStrategy.MERGE_COMP);
@@ -1110,7 +1110,7 @@ public class QueryComponent extends SearchComponent
// again when retrieving stored fields.
// TODO: use ResponseBuilder (w/ comments) or the request context?
rb.resultIds = resultIds;
- rb._responseDocs = responseDocs;
+ rb.setResponseDocs(responseDocs);
populateNextCursorMarkFromMergedShards(rb);
@@ -1130,7 +1130,7 @@ public class QueryComponent extends SearchComponent
* <code>ShardDocs</code> in <code>resultIds</code>, may or may not be
* part of a Cursor based request (method will NOOP if not needed)
*/
- private void populateNextCursorMarkFromMergedShards(ResponseBuilder rb) {
+ protected void populateNextCursorMarkFromMergedShards(ResponseBuilder rb) {
final CursorMark lastCursorMark = rb.getCursorMark();
if (null == lastCursorMark) {
@@ -1172,7 +1172,7 @@ public class QueryComponent extends SearchComponent
rb.setNextCursorMark(nextCursorMark);
}
- private NamedList unmarshalSortValues(SortSpec sortSpec,
+ protected NamedList unmarshalSortValues(SortSpec sortSpec,
NamedList sortFieldValues,
IndexSchema schema) {
NamedList unmarshalledSortValsPerField = new NamedList();
@@ -1213,7 +1213,7 @@ public class QueryComponent extends SearchComponent
return unmarshalledSortValsPerField;
}
- private void createRetrieveDocs(ResponseBuilder rb) {
+ protected void createRetrieveDocs(ResponseBuilder rb) {
// TODO: in a system with nTiers > 2, we could be passed "ids" here
// unless those requests always go to the final destination shard
@@ -1267,7 +1267,7 @@ public class QueryComponent extends SearchComponent
}
- private void returnFields(ResponseBuilder rb, ShardRequest sreq) {
+ protected void returnFields(ResponseBuilder rb, ShardRequest sreq) {
// Keep in mind that this could also be a shard in a multi-tiered system.
// TODO: if a multi-tiered system, it seems like some requests
// could/should bypass middlemen (like retrieving stored fields)
@@ -1318,7 +1318,7 @@ public class QueryComponent extends SearchComponent
if (removeKeyField) {
doc.removeFields(keyFieldName);
}
- rb._responseDocs.set(sdoc.positionInResponse, doc);
+ rb.getResponseDocs().set(sdoc.positionInResponse, doc);
}
}
}
@@ -1344,7 +1344,7 @@ public class QueryComponent extends SearchComponent
*
* TODO: when SOLR-5595 is fixed, this wont be needed, as we dont need to recompute sort values here from the comparator
*/
- private static class FakeScorer extends Scorer {
+ protected static class FakeScorer extends Scorer {
final int docid;
final float score;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cdce3db/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
index c1fb21a..8f20dbf 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
@@ -263,6 +263,10 @@ public class ResponseBuilder
public void setResponseDocs(SolrDocumentList _responseDocs) {
this._responseDocs = _responseDocs;
}
+
+ public SolrDocumentList getResponseDocs() {
+ return this._responseDocs;
+ }
public boolean isDebugTrack() {
return debugTrack;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cdce3db/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java b/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java
index 97b831b..2935aa1 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ShardDoc.java
@@ -16,21 +16,9 @@
*/
package org.apache.solr.handler.component;
-import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldDoc;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.util.PriorityQueue;
-import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.List;
-
-import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
-
public class ShardDoc extends FieldDoc {
public String shard;
public String shardAddress; // TODO
@@ -44,7 +32,7 @@ public class ShardDoc extends FieldDoc {
// this is currently the uniqueKeyField but
// may be replaced with internal docid in a future release.
- NamedList sortFieldValues;
+ public NamedList sortFieldValues;
// sort field values for *all* docs in a particular shard.
// this doc's values are in position orderInShard
@@ -93,154 +81,4 @@ public class ShardDoc extends FieldDoc {
+" ,positionInResponse="+positionInResponse
+" ,sortFieldValues="+sortFieldValues;
}
-}
-
-
-
-// used by distributed search to merge results.
-class ShardFieldSortedHitQueue extends PriorityQueue<ShardDoc> {
-
- /** Stores a comparator corresponding to each field being sorted by */
- protected Comparator<ShardDoc>[] comparators;
-
- /** Stores the sort criteria being used. */
- protected SortField[] fields;
-
- /** The order of these fieldNames should correspond to the order of sort field values retrieved from the shard */
- protected List<String> fieldNames = new ArrayList<>();
-
- public ShardFieldSortedHitQueue(SortField[] fields, int size, IndexSearcher searcher) {
- super(size);
- final int n = fields.length;
- //noinspection unchecked
- comparators = new Comparator[n];
- this.fields = new SortField[n];
- for (int i = 0; i < n; ++i) {
-
- // keep track of the named fields
- SortField.Type type = fields[i].getType();
- if (type!=SortField.Type.SCORE && type!=SortField.Type.DOC) {
- fieldNames.add(fields[i].getField());
- }
-
- String fieldname = fields[i].getField();
- comparators[i] = getCachedComparator(fields[i], searcher);
-
- if (fields[i].getType() == SortField.Type.STRING) {
- this.fields[i] = new SortField(fieldname, SortField.Type.STRING,
- fields[i].getReverse());
- } else {
- this.fields[i] = new SortField(fieldname, fields[i].getType(),
- fields[i].getReverse());
- }
-
- //System.out.println("%%%%%%%%%%%%%%%%%% got "+fields[i].getType() +" for "+ fieldname +" fields[i].getReverse(): "+fields[i].getReverse());
- }
- }
-
- @Override
- protected boolean lessThan(ShardDoc docA, ShardDoc docB) {
- // If these docs are from the same shard, then the relative order
- // is how they appeared in the response from that shard.
- if (docA.shard == docB.shard) {
- // if docA has a smaller position, it should be "larger" so it
- // comes before docB.
- // This will handle sorting by docid within the same shard
-
- // comment this out to test comparators.
- return !(docA.orderInShard < docB.orderInShard);
- }
-
-
- // run comparators
- final int n = comparators.length;
- int c = 0;
- for (int i = 0; i < n && c == 0; i++) {
- c = (fields[i].getReverse()) ? comparators[i].compare(docB, docA)
- : comparators[i].compare(docA, docB);
- }
-
- // solve tiebreaks by comparing shards (similar to using docid)
- // smaller docid's beat larger ids, so reverse the natural ordering
- if (c == 0) {
- c = -docA.shard.compareTo(docB.shard);
- }
-
- return c < 0;
- }
-
- Comparator<ShardDoc> getCachedComparator(SortField sortField, IndexSearcher searcher) {
- SortField.Type type = sortField.getType();
- if (type == SortField.Type.SCORE) {
- return comparatorScore();
- } else if (type == SortField.Type.REWRITEABLE) {
- try {
- sortField = sortField.rewrite(searcher);
- } catch (IOException e) {
- throw new SolrException(SERVER_ERROR, "Exception rewriting sort field " + sortField, e);
- }
- }
- return comparatorFieldComparator(sortField);
- }
-
- abstract class ShardComparator implements Comparator<ShardDoc> {
- final SortField sortField;
- final String fieldName;
- final int fieldNum;
-
- public ShardComparator(SortField sortField) {
- this.sortField = sortField;
- this.fieldName = sortField.getField();
- int fieldNum = 0;
- for (int i=0; i<fieldNames.size(); i++) {
- if (fieldNames.get(i).equals(fieldName)) {
- fieldNum = i;
- break;
- }
- }
- this.fieldNum = fieldNum;
- }
-
- Object sortVal(ShardDoc shardDoc) {
- assert(shardDoc.sortFieldValues.getName(fieldNum).equals(fieldName));
- List lst = (List)shardDoc.sortFieldValues.getVal(fieldNum);
- return lst.get(shardDoc.orderInShard);
- }
- }
-
- static Comparator<ShardDoc> comparatorScore() {
- return new Comparator<ShardDoc>() {
- @Override
- public final int compare(final ShardDoc o1, final ShardDoc o2) {
- final float f1 = o1.score;
- final float f2 = o2.score;
- if (f1 < f2)
- return -1;
- if (f1 > f2)
- return 1;
- return 0;
- }
- };
- }
-
- Comparator<ShardDoc> comparatorFieldComparator(SortField sortField) {
- final FieldComparator fieldComparator;
- try {
- fieldComparator = sortField.getComparator(0, 0);
- } catch (IOException e) {
- throw new RuntimeException("Unable to get FieldComparator for sortField " + sortField);
- }
-
- return new ShardComparator(sortField) {
- // Since the PriorityQueue keeps the biggest elements by default,
- // we need to reverse the field compare ordering so that the
- // smallest elements are kept instead of the largest... hence
- // the negative sign.
- @Override
- public int compare(final ShardDoc o1, final ShardDoc o2) {
- //noinspection unchecked
- return -fieldComparator.compareValues(sortVal(o1), sortVal(o2));
- }
- };
- }
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cdce3db/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java b/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java
new file mode 100644
index 0000000..fd0603d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/component/ShardFieldSortedHitQueue.java
@@ -0,0 +1,179 @@
+package org.apache.solr.handler.component;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+
+import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.PriorityQueue;
+import org.apache.solr.common.SolrException;
+
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+
+// used by distributed search to merge results.
+public class ShardFieldSortedHitQueue extends PriorityQueue<ShardDoc> {
+
+ /** Stores a comparator corresponding to each field being sorted by */
+ protected Comparator<ShardDoc>[] comparators;
+
+ /** Stores the sort criteria being used. */
+ protected SortField[] fields;
+
+ /** The order of these fieldNames should correspond to the order of sort field values retrieved from the shard */
+ protected List<String> fieldNames = new ArrayList<>();
+
+ /**
+  * Builds a queue of the given capacity that merges per-shard hits according
+  * to the supplied sort criteria.
+  *
+  * @param fields   sort fields in priority order (first field wins ties last)
+  * @param size     maximum number of documents the queue retains
+  * @param searcher used to rewrite {@code REWRITEABLE} sort fields into concrete ones
+  */
+ public ShardFieldSortedHitQueue(SortField[] fields, int size, IndexSearcher searcher) {
+ super(size);
+ final int n = fields.length;
+ //noinspection unchecked
+ comparators = new Comparator[n];
+ this.fields = new SortField[n];
+ for (int i = 0; i < n; ++i) {
+
+ // keep track of the named fields
+ // SCORE and DOC sorts carry no per-field values in the shard response,
+ // so they are excluded from fieldNames (see ShardComparator's fieldNum lookup)
+ SortField.Type type = fields[i].getType();
+ if (type!=SortField.Type.SCORE && type!=SortField.Type.DOC) {
+ fieldNames.add(fields[i].getField());
+ }
+
+ String fieldname = fields[i].getField();
+ comparators[i] = getCachedComparator(fields[i], searcher);
+
+ // re-create the SortField from just (name, type, reverse) so the stored
+ // criteria are uniform regardless of how the caller constructed them
+ if (fields[i].getType() == SortField.Type.STRING) {
+ this.fields[i] = new SortField(fieldname, SortField.Type.STRING,
+ fields[i].getReverse());
+ } else {
+ this.fields[i] = new SortField(fieldname, fields[i].getType(),
+ fields[i].getReverse());
+ }
+
+ //System.out.println("%%%%%%%%%%%%%%%%%% got "+fields[i].getType() +" for "+ fieldname +" fields[i].getReverse(): "+fields[i].getReverse());
+ }
+ }
+
+ /**
+  * PriorityQueue ordering: returns true when docA should be evicted before docB.
+  * Ties across shards fall back to shard-name comparison so merge order is
+  * deterministic.
+  */
+ @Override
+ protected boolean lessThan(ShardDoc docA, ShardDoc docB) {
+ // If these docs are from the same shard, then the relative order
+ // is how they appeared in the response from that shard.
+ // NOTE(review): reference equality (==) on shard — presumably both docs of
+ // one shard share the same String instance; confirm against ShardDoc usage.
+ if (docA.shard == docB.shard) {
+ // if docA has a smaller position, it should be "larger" so it
+ // comes before docB.
+ // This will handle sorting by docid within the same shard
+
+ // comment this out to test comparators.
+ return !(docA.orderInShard < docB.orderInShard);
+ }
+
+
+ // run comparators
+ // apply each sort criterion in priority order; stop at the first non-tie.
+ // Reversed fields swap the operands instead of negating, which also keeps
+ // comparator contracts intact for Integer.MIN_VALUE-style results.
+ final int n = comparators.length;
+ int c = 0;
+ for (int i = 0; i < n && c == 0; i++) {
+ c = (fields[i].getReverse()) ? comparators[i].compare(docB, docA)
+ : comparators[i].compare(docA, docB);
+ }
+
+ // solve tiebreaks by comparing shards (similar to using docid)
+ // smaller docid's beat larger ids, so reverse the natural ordering
+ if (c == 0) {
+ c = -docA.shard.compareTo(docB.shard);
+ }
+
+ return c < 0;
+ }
+
+ /**
+  * Returns a comparator for the given sort field: a score comparator for
+  * SCORE sorts, otherwise a FieldComparator-backed one. REWRITEABLE fields
+  * are first rewritten against the searcher; rewrite failure is surfaced as
+  * a SERVER_ERROR SolrException.
+  */
+ Comparator<ShardDoc> getCachedComparator(SortField sortField, IndexSearcher searcher) {
+ SortField.Type type = sortField.getType();
+ if (type == SortField.Type.SCORE) {
+ return comparatorScore();
+ } else if (type == SortField.Type.REWRITEABLE) {
+ try {
+ sortField = sortField.rewrite(searcher);
+ } catch (IOException e) {
+ throw new SolrException(SERVER_ERROR, "Exception rewriting sort field " + sortField, e);
+ }
+ }
+ return comparatorFieldComparator(sortField);
+ }
+
+ /**
+  * Base for comparators that read a shard's per-field sort values.
+  * Resolves the field's position (fieldNum) in fieldNames once at
+  * construction so sortVal() is a direct index lookup.
+  */
+ abstract class ShardComparator implements Comparator<ShardDoc> {
+ final SortField sortField;
+ final String fieldName;
+ final int fieldNum;
+
+ public ShardComparator(SortField sortField) {
+ this.sortField = sortField;
+ this.fieldName = sortField.getField();
+ // linear scan is fine: sort criteria lists are tiny.
+ // NOTE(review): if fieldName is absent, fieldNum silently stays 0 —
+ // the assert in sortVal() is the only guard; confirm callers always
+ // pass a field that was registered in fieldNames.
+ int fieldNum = 0;
+ for (int i=0; i<fieldNames.size(); i++) {
+ if (fieldNames.get(i).equals(fieldName)) {
+ fieldNum = i;
+ break;
+ }
+ }
+ this.fieldNum = fieldNum;
+ }
+
+ // Returns this doc's sort value for the comparator's field: the
+ // orderInShard-th entry of the shard's value list for fieldNum.
+ Object sortVal(ShardDoc shardDoc) {
+ assert(shardDoc.sortFieldValues.getName(fieldNum).equals(fieldName));
+ List lst = (List)shardDoc.sortFieldValues.getVal(fieldNum);
+ return lst.get(shardDoc.orderInShard);
+ }
+ }
+
+ /** Ascending comparison of ShardDoc.score (reversal is applied in lessThan). */
+ static Comparator<ShardDoc> comparatorScore() {
+ return new Comparator<ShardDoc>() {
+ @Override
+ public final int compare(final ShardDoc o1, final ShardDoc o2) {
+ final float f1 = o1.score;
+ final float f2 = o2.score;
+ if (f1 < f2)
+ return -1;
+ if (f1 > f2)
+ return 1;
+ return 0;
+ }
+ };
+ }
+
+ /**
+  * Wraps the SortField's own FieldComparator to compare shard-supplied sort
+  * values. The comparator is obtained once here and reused for every compare.
+  */
+ Comparator<ShardDoc> comparatorFieldComparator(SortField sortField) {
+ final FieldComparator fieldComparator;
+ try {
+ fieldComparator = sortField.getComparator(0, 0);
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to get FieldComparator for sortField " + sortField);
+ }
+
+ return new ShardComparator(sortField) {
+ // Since the PriorityQueue keeps the biggest elements by default,
+ // we need to reverse the field compare ordering so that the
+ // smallest elements are kept instead of the largest... hence
+ // the negative sign.
+ @Override
+ public int compare(final ShardDoc o1, final ShardDoc o2) {
+ //noinspection unchecked
+ return -fieldComparator.compareValues(sortVal(o1), sortVal(o2));
+ }
+ };
+ }
+}
\ No newline at end of file
[19/21] lucene-solr git commit: SOLR-8508: Implement
DatabaseMetaDataImpl.getCatalogs()
Posted by cp...@apache.org.
SOLR-8508: Implement DatabaseMetaDataImpl.getCatalogs()
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/edf66598
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/edf66598
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/edf66598
Branch: refs/heads/master-solr-8621
Commit: edf665988d6f7acf95ec49ed16a3afc30ffcb342
Parents: ce0b931
Author: jbernste <jb...@apache.org>
Authored: Mon Feb 1 16:20:21 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Mon Feb 1 16:21:13 2016 -0500
----------------------------------------------------------------------
.../org/apache/solr/handler/SQLHandler.java | 65 +++++++++++++++++---
.../client/solrj/io/sql/ConnectionImpl.java | 30 ++++++---
.../solrj/io/sql/DatabaseMetaDataImpl.java | 7 ++-
.../solr/client/solrj/io/sql/ResultSetImpl.java | 22 +++++--
.../solr/client/solrj/io/sql/StatementImpl.java | 29 +++------
.../solr/client/solrj/io/sql/JdbcTest.java | 9 ++-
6 files changed, 115 insertions(+), 47 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index 7bbe7ea..7aa8ce5 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -164,7 +164,13 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
TupleStream sqlStream = null;
- if(sqlVistor.groupByQuery) {
+ if(sqlVistor.table.toUpperCase(Locale.getDefault()).contains("_CATALOGS_")) {
+ if (!sqlVistor.fields.contains("TABLE_CAT")) {
+ throw new IOException("When querying _CATALOGS_, fields must contain column TABLE_CAT");
+ }
+
+ sqlStream = new CatalogsStream(defaultZkhost);
+ } else if(sqlVistor.groupByQuery) {
if(aggregationMode == AggregationMode.FACET) {
sqlStream = doGroupByWithAggregatesFacets(sqlVistor);
} else {
@@ -549,6 +555,11 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
throw new IOException("Select columns must be specified.");
}
+ TableSpec tableSpec = new TableSpec(sqlVisitor.table, defaultZkhost);
+
+ String zkHost = tableSpec.zkHost;
+ String collection = tableSpec.collection;
+
boolean score = false;
for (String field : fields) {
@@ -594,7 +605,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
}
} else {
if(sqlVisitor.limit < 0) {
- throw new IOException("order by is required for unlimited select statements.");
+ throw new IOException("order by is required for unlimited select statements.");
} else {
siBuf.append("score desc");
if(!score) {
@@ -603,12 +614,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
}
}
- TableSpec tableSpec = new TableSpec(sqlVisitor.table, defaultZkhost);
-
- String zkHost = tableSpec.zkHost;
- String collection = tableSpec.collection;
Map<String, String> params = new HashMap();
-
params.put("fl", fl.toString());
params.put("q", sqlVisitor.query);
@@ -616,7 +622,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
params.put("sort", siBuf.toString());
}
- TupleStream tupleStream = null;
+ TupleStream tupleStream;
if(sqlVisitor.limit > -1) {
params.put("rows", Integer.toString(sqlVisitor.limit));
@@ -1355,6 +1361,49 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
}
}
+ /**
+  * Backs "select TABLE_CAT from _CATALOGS_" for JDBC metadata: emits one
+  * tuple per known catalog, then an EOF tuple. Currently the only catalog
+  * is the zkHost this handler was configured with.
+  */
+ private static class CatalogsStream extends TupleStream {
+ private final String zkHost;
+ private StreamContext context;
+ private int currentIndex = 0;   // how many catalog tuples have been emitted
+ private List<String> catalogs;
+
+ public CatalogsStream(String zkHost) {
+ this.zkHost = zkHost;
+ }
+
+ // Leaf stream: no child streams.
+ public List<TupleStream> children() {
+ return new ArrayList<>();
+ }
+
+ // Populates the catalog list; today that is just the single zkHost.
+ public void open() throws IOException {
+ this.catalogs = new ArrayList<>();
+ this.catalogs.add(this.zkHost);
+ }
+
+ // Returns the next catalog tuple, or a tuple with EOF=true when exhausted.
+ // NOTE(review): emits this.zkHost rather than catalogs.get(currentIndex);
+ // equivalent only while the list holds exactly the zkHost entry — confirm
+ // before adding more catalogs.
+ public Tuple read() throws IOException {
+ Map fields = new HashMap<>();
+ if (this.currentIndex < this.catalogs.size()) {
+ this.currentIndex += 1;
+ fields.put("TABLE_CAT", this.zkHost);
+ } else {
+ fields.put("EOF", "true");
+ }
+ return new Tuple(fields);
+ }
+
+ // No defined sort order for catalog tuples.
+ public StreamComparator getStreamSort() {
+ return null;
+ }
+
+ // Nothing to release: no network or file resources are held.
+ public void close() throws IOException {
+
+ }
+
+ public void setStreamContext(StreamContext context) {
+ this.context = context;
+ }
+ }
+
private static class MetadataStream extends TupleStream {
private final TupleStream stream;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java
index ece28e0..a9d73d4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ConnectionImpl.java
@@ -48,14 +48,18 @@ class ConnectionImpl implements Connection {
private final CloudSolrClient client;
private final String collection;
private final Properties properties;
+ private final DatabaseMetaData databaseMetaData;
+ private final Statement connectionStatement;
private boolean closed;
private SQLWarning currentWarning;
- ConnectionImpl(String url, String zkHost, String collection, Properties properties) {
+ ConnectionImpl(String url, String zkHost, String collection, Properties properties) throws SQLException {
this.url = url;
this.client = solrClientCache.getCloudSolrClient(zkHost);
this.collection = collection;
this.properties = properties;
+ this.connectionStatement = createStatement();
+ this.databaseMetaData = new DatabaseMetaDataImpl(this, this.connectionStatement);
}
String getUrl() {
@@ -119,11 +123,17 @@ class ConnectionImpl implements Connection {
if(closed) {
return;
}
+
+ this.closed = true;
+
try {
- this.solrClientCache.close();
- this.closed = true;
- } catch (Exception e) {
- throw new SQLException(e);
+ if(this.connectionStatement != null) {
+ this.connectionStatement.close();
+ }
+ } finally {
+ if (this.solrClientCache != null) {
+ this.solrClientCache.close();
+ }
}
}
@@ -134,7 +144,7 @@ class ConnectionImpl implements Connection {
@Override
public DatabaseMetaData getMetaData() throws SQLException {
- return new DatabaseMetaDataImpl(this);
+ return this.databaseMetaData;
}
@Override
@@ -154,7 +164,7 @@ class ConnectionImpl implements Connection {
@Override
public String getCatalog() throws SQLException {
- return this.collection;
+ return this.client.getZkHost();
}
@Override
@@ -170,7 +180,7 @@ class ConnectionImpl implements Connection {
@Override
public SQLWarning getWarnings() throws SQLException {
if(isClosed()) {
- throw new SQLException("Statement is closed.");
+ throw new SQLException("Connection is closed.");
}
return this.currentWarning;
@@ -179,7 +189,7 @@ class ConnectionImpl implements Connection {
@Override
public void clearWarnings() throws SQLException {
if(isClosed()) {
- throw new SQLException("Statement is closed.");
+ throw new SQLException("Connection is closed.");
}
this.currentWarning = null;
@@ -341,7 +351,7 @@ class ConnectionImpl implements Connection {
@Override
public String getSchema() throws SQLException {
- throw new UnsupportedOperationException();
+ return this.collection;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java
index fb8ce27..4700f2f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/DatabaseMetaDataImpl.java
@@ -22,12 +22,15 @@ import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
+import java.sql.Statement;
class DatabaseMetaDataImpl implements DatabaseMetaData {
private final ConnectionImpl connection;
+ private final Statement connectionStatement;
- DatabaseMetaDataImpl(ConnectionImpl connection) {
+ public DatabaseMetaDataImpl(ConnectionImpl connection, Statement connectionStatement) {
this.connection = connection;
+ this.connectionStatement = connectionStatement;
}
@Override
@@ -642,7 +645,7 @@ class DatabaseMetaDataImpl implements DatabaseMetaData {
@Override
public ResultSet getCatalogs() throws SQLException {
- return null;
+ return this.connectionStatement.executeQuery("select TABLE_CAT from _CATALOGS_");
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
index 0f4dcf0..7367b6f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
@@ -42,6 +42,8 @@ import java.util.Map;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.PushBackStream;
+import org.apache.solr.client.solrj.io.stream.SolrStream;
+import org.apache.solr.client.solrj.io.stream.StreamContext;
class ResultSetImpl implements ResultSet {
private final StatementImpl statement;
@@ -55,12 +57,18 @@ class ResultSetImpl implements ResultSet {
private SQLWarning currentWarning;
private boolean wasLastValueNull;
- ResultSetImpl(StatementImpl statement) {
+ ResultSetImpl(StatementImpl statement, SolrStream solrStream) throws SQLException {
this.statement = statement;
- this.solrStream = new PushBackStream(statement.getSolrStream());
- // Read the first tuple so that metadata can be gathered
try {
+ this.solrStream = new PushBackStream(solrStream);
+
+ StreamContext context = new StreamContext();
+ context.setSolrClientCache(((ConnectionImpl)this.statement.getConnection()).getSolrClientCache());
+ this.solrStream.setStreamContext(context);
+
+ this.solrStream.open();
+
this.metadataTuple = this.solrStream.read();
Object isMetadata = this.metadataTuple.get("isMetadata");
@@ -71,7 +79,7 @@ class ResultSetImpl implements ResultSet {
this.firstTuple = this.solrStream.read();
this.solrStream.pushBack(firstTuple);
} catch (IOException e) {
- throw new RuntimeException("Couldn't read first tuple", e);
+ throw new SQLException("Couldn't read first tuple", e);
}
this.resultSetMetaData = new ResultSetMetaDataImpl(this);
@@ -115,6 +123,12 @@ class ResultSetImpl implements ResultSet {
@Override
public void close() throws SQLException {
this.done = this.closed = true;
+
+ try {
+ this.solrStream.close();
+ } catch (IOException e) {
+ throw new SQLException(e);
+ }
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java
index b86b8d4..14bae09 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java
@@ -32,7 +32,6 @@ import java.util.HashMap;
import java.util.Random;
import org.apache.solr.client.solrj.io.stream.SolrStream;
-import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
@@ -43,7 +42,6 @@ import org.apache.solr.common.params.CommonParams;
class StatementImpl implements Statement {
private final ConnectionImpl connection;
- private SolrStream solrStream;
private boolean closed;
private String currentSQL;
private ResultSetImpl currentResultSet;
@@ -53,26 +51,16 @@ class StatementImpl implements Statement {
this.connection = connection;
}
- public SolrStream getSolrStream() {
- return this.solrStream;
- }
-
@Override
public ResultSet executeQuery(String sql) throws SQLException {
try {
if(this.currentResultSet != null) {
this.currentResultSet.close();
this.currentResultSet = null;
- this.solrStream.close();
}
closed = false; // If closed reopen so Statement can be reused.
- this.solrStream = constructStream(sql);
- StreamContext context = new StreamContext();
- context.setSolrClientCache(this.connection.getSolrClientCache());
- this.solrStream.setStreamContext(context);
- this.solrStream.open();
- this.currentResultSet = new ResultSetImpl(this);
+ this.currentResultSet = new ResultSetImpl(this, constructStream(sql));
return this.currentResultSet;
} catch(Exception e) {
throw new SQLException(e);
@@ -83,10 +71,10 @@ class StatementImpl implements Statement {
try {
ZkStateReader zkStateReader = this.connection.getClient().getZkStateReader();
ClusterState clusterState = zkStateReader.getClusterState();
- Collection<Slice> slices = clusterState.getActiveSlices(this.connection.getCatalog());
+ Collection<Slice> slices = clusterState.getActiveSlices(this.connection.getSchema());
if(slices == null) {
- throw new Exception("Collection not found:"+this.connection.getCatalog());
+ throw new Exception("Collection not found:"+this.connection.getSchema());
}
List<Replica> shuffler = new ArrayList<>();
@@ -126,13 +114,10 @@ class StatementImpl implements Statement {
return;
}
- try {
- if(this.solrStream != null) {
- this.solrStream.close();
- }
- this.closed = true;
- } catch (Exception e) {
- throw new SQLException(e);
+ this.closed = true;
+
+ if(this.currentResultSet != null) {
+ this.currentResultSet.close();
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/edf66598/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
index e93cf10..7f9e98f 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
@@ -378,7 +378,8 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
private void testJDBCMethods(String collection, String connectionString, Properties properties, String sql) throws Exception {
try (Connection con = DriverManager.getConnection(connectionString, properties)) {
assertTrue(con.isValid(DEFAULT_CONNECTION_TIMEOUT));
- assertEquals(collection, con.getCatalog());
+ assertEquals(zkServer.getZkAddress(), con.getCatalog());
+ assertEquals(collection, con.getSchema());
DatabaseMetaData databaseMetaData = con.getMetaData();
assertNotNull(databaseMetaData);
@@ -386,6 +387,12 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
assertEquals(con, databaseMetaData.getConnection());
assertEquals(connectionString, databaseMetaData.getURL());
+ try(ResultSet rs = databaseMetaData.getCatalogs()) {
+ assertTrue(rs.next());
+ assertEquals(zkServer.getZkAddress(), rs.getString("TABLE_CAT"));
+ assertFalse(rs.next());
+ }
+
assertNull(con.getWarnings());
con.clearWarnings();
assertNull(con.getWarnings());
[20/21] lucene-solr git commit: SOLR-8625: SQL: Wrong error msg if
stmt param is missing
Posted by cp...@apache.org.
SOLR-8625: SQL: Wrong error msg if stmt param is missing
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c136bd7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c136bd7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c136bd7d
Branch: refs/heads/master-solr-8621
Commit: c136bd7dc737096843ff538e172762026b895f5b
Parents: edf6659
Author: jbernste <jb...@apache.org>
Authored: Mon Feb 1 16:52:01 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Mon Feb 1 16:52:01 2016 -0500
----------------------------------------------------------------------
solr/core/src/java/org/apache/solr/handler/SQLHandler.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c136bd7d/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index 7aa8ce5..ec0ec77 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -109,7 +109,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware {
try {
if(sql == null) {
- throw new Exception("sql parameter cannot be null");
+ throw new Exception("stmt parameter cannot be null");
}
context.setSolrClientCache(StreamHandler.clientCache);