You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by kr...@apache.org on 2016/10/27 20:09:05 UTC
[01/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9518: Kerberos
Delegation Tokens don't work without a chrooted ZK
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-8593 285bbf012 -> 1cd8da986
SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9b49c72d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9b49c72d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9b49c72d
Branch: refs/heads/jira/solr-8593
Commit: 9b49c72dbc4d27a3160b34b5e38e095ca85daa6f
Parents: 0ec1f22
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 13:06:40 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 13:06:40 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../solr/security/DelegationTokenKerberosFilter.java | 11 ++++++++---
.../java/org/apache/solr/security/KerberosPlugin.java | 9 +++++----
3 files changed, 15 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e1c3971..92a994f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -200,6 +200,8 @@ Bug Fixes
* SOLR-9325: solr.log is now written to $SOLR_LOGS_DIR without changing log4j.properties (janhoy)
+* SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK (Ishan Chattopadhyaya,via noble)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
index ca27861..421de52 100644
--- a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
+++ b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
@@ -50,6 +50,11 @@ import org.apache.zookeeper.data.ACL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/**
+ * This is an authentication filter based on Hadoop's {@link DelegationTokenAuthenticationFilter}.
+ * The Kerberos plugin can be configured to use delegation tokens, which allow an
+ * application to reuse the authentication of an end-user or another application.
+ */
public class DelegationTokenKerberosFilter extends DelegationTokenAuthenticationFilter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -149,10 +154,10 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
throw new IllegalArgumentException("zkClient required");
}
String zkHost = zkClient.getZkServerAddress();
- String zkChroot = zkHost.substring(zkHost.indexOf("/"));
- zkChroot = zkChroot.startsWith("/") ? zkChroot.substring(1) : zkChroot;
+ String zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): "";
String zkNamespace = zkChroot + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH;
- String zkConnectionString = zkHost.substring(0, zkHost.indexOf("/"));
+ zkNamespace = zkNamespace.startsWith("/") ? zkNamespace.substring(1) : zkNamespace;
+ String zkConnectionString = zkHost.contains("/")? zkHost.substring(0, zkHost.indexOf("/")): zkHost;
SolrZkToCuratorCredentialsACLs curatorToSolrZk = new SolrZkToCuratorCredentialsACLs(zkClient);
final int connectionTimeoutMs = 30000; // this value is currently hard coded, see SOLR-7561.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
index d4a2823..3655ac9 100644
--- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
@@ -142,12 +142,13 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
String zkHost = controller.getZkServerAddress();
putParam(params, "token.validity", DELEGATION_TOKEN_VALIDITY, "36000");
params.put("zk-dt-secret-manager.enable", "true");
+
+ String chrootPath = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): "";
+ String znodeWorkingPath = chrootPath + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH + "/zkdtsm";
// Note - Curator complains if the znodeWorkingPath starts with /
- String chrootPath = zkHost.substring(zkHost.indexOf("/"));
- String relativePath = chrootPath.startsWith("/") ? chrootPath.substring(1) : chrootPath;
+ znodeWorkingPath = znodeWorkingPath.startsWith("/")? znodeWorkingPath.substring(1): znodeWorkingPath;
putParam(params, "zk-dt-secret-manager.znodeWorkingPath",
- DELEGATION_TOKEN_SECRET_MANAGER_ZNODE_WORKING_PATH,
- relativePath + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH + "/zkdtsm");
+ DELEGATION_TOKEN_SECRET_MANAGER_ZNODE_WORKING_PATH, znodeWorkingPath);
putParam(params, "signer.secret.provider.zookeeper.path",
DELEGATION_TOKEN_SECRET_PROVIDER_ZK_PATH, "/token");
// ensure krb5 is setup properly before running curator
[07/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-7506: Roll over GC
logs by default via bin/solr scripts
Posted by kr...@apache.org.
SOLR-7506: Roll over GC logs by default via bin/solr scripts
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ef573746
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ef573746
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ef573746
Branch: refs/heads/jira/solr-8593
Commit: ef5737466e4597c21c80b167f1db295c081578d4
Parents: 61e180b
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 24 14:22:24 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 24 14:22:24 2016 +0200
----------------------------------------------------------------------
solr/CHANGES.txt | 1 +
solr/bin/solr | 5 +++--
solr/bin/solr.cmd | 8 ++++----
solr/core/src/java/org/apache/solr/util/SolrCLI.java | 4 ++--
solr/core/src/test/org/apache/solr/util/UtilsToolTest.java | 6 +++++-
5 files changed, 15 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 04d4d77..e223b4d 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -234,6 +234,7 @@ Optimizations
* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
+* SOLR-7506: Roll over GC logs by default via bin/solr scripts (shalin, janhoy)
Other Changes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index d2936de..9d55e0a 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -1411,13 +1411,14 @@ if [ -z ${GC_LOG_OPTS+x} ]; then
else
GC_LOG_OPTS=($GC_LOG_OPTS)
fi
-# if verbose gc logging enabled, setup the location of the log file
+
+# if verbose gc logging enabled, setup the location of the log file and rotation
if [ "$GC_LOG_OPTS" != "" ]; then
gc_log_flag="-Xloggc"
if [ "$JAVA_VENDOR" == "IBM J9" ]; then
gc_log_flag="-Xverbosegclog"
fi
- GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
+ GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M)
fi
# If ZK_HOST is defined, the assume SolrCloud mode
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 317a789..4ab188f 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -1013,23 +1013,23 @@ IF NOT EXIST "%SOLR_SERVER_DIR%\tmp" (
)
IF "%JAVA_VENDOR%" == "IBM J9" (
- set "GCLOG_OPT=-Xverbosegclog"
+ set GCLOG_OPT="-Xverbosegclog:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
) else (
- set "GCLOG_OPT=-Xloggc"
+ set GCLOG_OPT="-Xloggc:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
)
IF "%FG%"=="1" (
REM run solr in the foreground
title "Solr-%SOLR_PORT%"
echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
- "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+ "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
-Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
-Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
-Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
-Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
) ELSE (
START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" ^
- "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+ "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
-Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
-Dsolr.log.muteconsole ^
-Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 76e5ee9..ebaeda8 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -3444,13 +3444,13 @@ public class SolrCLI {
Files.createDirectories(archivePath);
}
List<Path> archived = Files.find(archivePath, 1, (f, a)
- -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+ -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
.collect(Collectors.toList());
for (Path p : archived) {
Files.delete(p);
}
List<Path> files = Files.find(logsPath, 1, (f, a)
- -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+ -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
.collect(Collectors.toList());
if (files.size() > 0) {
out("Archiving " + files.size() + " old GC log files to " + archivePath);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
index 6b2d31c..0ca65ed 100644
--- a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
+++ b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
@@ -55,6 +55,10 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
"solr_log_20160304",
"solr-8983-console.log",
"solr_gc_log_20160102",
+ "solr_gcnotremove",
+ "solr_gc.log",
+ "solr_gc.log.0",
+ "solr_gc.log.0.current",
"solr_gc_log_2");
@Before
@@ -136,7 +140,7 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
String[] args = {"utils", "-archive_gc_logs", "-l", dir.toString()};
assertEquals(files.size(), fileCount());
assertEquals(0, runTool(args));
- assertEquals(files.size()-2, fileCount());
+ assertEquals(files.size()-5, fileCount());
assertFalse(listFiles().contains("solr_gc_log_2"));
assertTrue(Files.exists(dir.resolve("archived").resolve("solr_gc_log_2")));
assertEquals(0, runTool(args));
[23/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9579: fix intellij
compilation: add lucene core dependency to the langid contrib
Posted by kr...@apache.org.
SOLR-9579: fix intellij compilation: add lucene core dependency to the langid contrib
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/941c5e92
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/941c5e92
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/941c5e92
Branch: refs/heads/jira/solr-8593
Commit: 941c5e92ba6ff76e913746caf68e05b563983f17
Parents: 9303112
Author: Steve Rowe <sa...@apache.org>
Authored: Tue Oct 25 17:27:37 2016 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Tue Oct 25 17:30:25 2016 -0400
----------------------------------------------------------------------
dev-tools/idea/solr/contrib/langid/langid.iml | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/941c5e92/dev-tools/idea/solr/contrib/langid/langid.iml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/solr/contrib/langid/langid.iml b/dev-tools/idea/solr/contrib/langid/langid.iml
index 1fa1e5d..28223bd 100644
--- a/dev-tools/idea/solr/contrib/langid/langid.iml
+++ b/dev-tools/idea/solr/contrib/langid/langid.iml
@@ -28,6 +28,7 @@
</orderEntry>
<orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
<orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
+ <orderEntry type="module" module-name="lucene-core" />
<orderEntry type="module" module-name="solr-core" />
<orderEntry type="module" module-name="solrj" />
</component>
[21/50] [abbrv] lucene-solr:jira/solr-8593: Merge remote-tracking
branch 'origin/master'
Posted by kr...@apache.org.
Merge remote-tracking branch 'origin/master'
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a916877f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a916877f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a916877f
Branch: refs/heads/jira/solr-8593
Commit: a916877f3877e416f26a4b5f9c8914ccca649766
Parents: 34ad857 c15c8af
Author: Noble Paul <no...@apache.org>
Authored: Wed Oct 26 02:04:41 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Oct 26 02:04:41 2016 +0530
----------------------------------------------------------------------
lucene/CHANGES.txt | 7 +
.../LegacySortedNumericDocValuesWrapper.java | 1 +
.../DefaultSortedSetDocValuesReaderState.java | 3 +-
.../SortedSetDocValuesFacetCounts.java | 124 ++++++++++++---
.../facet/taxonomy/FastTaxonomyFacetCounts.java | 49 ++++++
.../lucene/facet/taxonomy/TaxonomyFacets.java | 4 +-
.../sortedset/TestSortedSetDocValuesFacets.java | 25 +--
.../facet/taxonomy/TestTaxonomyFacetCounts.java | 84 ++++------
.../highlight/WeightedSpanTermExtractor.java | 13 +-
.../search/highlight/HighlighterTest.java | 17 ++
solr/CHANGES.txt | 27 +++-
solr/bin/solr | 13 +-
solr/bin/solr.cmd | 8 +-
solr/bin/solr.in.sh | 4 +-
.../backup/repository/HdfsBackupRepository.java | 9 ++
.../apache/solr/handler/OldBackupDirectory.java | 2 +-
.../org/apache/solr/request/IntervalFacets.java | 12 +-
.../apache/solr/search/facet/FacetField.java | 1 +
.../solr/search/facet/FacetFieldMerger.java | 8 +-
.../solr/search/facet/FacetFieldProcessor.java | 22 ++-
.../apache/solr/search/facet/FacetRequest.java | 1 +
.../TemplateUpdateProcessorFactory.java | 10 +-
.../src/java/org/apache/solr/util/SolrCLI.java | 8 +-
.../apache/solr/cloud/TestCloudRecovery.java | 154 +++++++++++++++++++
.../TestLeaderRecoverFromLogOnStartup.java | 77 ----------
.../solr/handler/TestHdfsBackupRestoreCore.java | 11 ++
.../solr/request/TestIntervalFaceting.java | 7 +-
.../solr/search/facet/TestJsonFacets.java | 37 +++++
.../org/apache/solr/util/UtilsToolTest.java | 6 +-
29 files changed, 536 insertions(+), 208 deletions(-)
----------------------------------------------------------------------
[39/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9654: tests:
temporarily disable overrequest code
Posted by kr...@apache.org.
SOLR-9654: tests: temporarily disable overrequest code
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e4483607
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e4483607
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e4483607
Branch: refs/heads/jira/solr-8593
Commit: e4483607a58c68df6a8ea0c7cb107bf2f8707a45
Parents: 3d21029
Author: yonik <yo...@apache.org>
Authored: Wed Oct 26 17:53:00 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Wed Oct 26 17:53:00 2016 -0400
----------------------------------------------------------------------
.../src/test/org/apache/solr/search/facet/TestJsonFacets.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e4483607/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 1c1a343..c8ed0e6 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1147,7 +1147,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
);
-
+/*
if (!client.local()) {
client.testJQ(params(p, "q", "*:*"
, "json.facet", "{" +
@@ -1182,7 +1182,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
"}"
);
}
-
+*/
}
[20/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-7604: add testcase
to verify the schema of .system collection
Posted by kr...@apache.org.
SOLR-7604: add testcase to verify the schema of .system collection
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/34ad8577
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/34ad8577
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/34ad8577
Branch: refs/heads/jira/solr-8593
Commit: 34ad8577b6fac0e48cc1885f2fe40b0abf60bd79
Parents: c9de11d
Author: Noble Paul <no...@apache.org>
Authored: Wed Oct 26 02:04:20 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Oct 26 02:04:20 2016 +0530
----------------------------------------------------------------------
.../src/test/org/apache/solr/handler/TestBlobHandler.java | 10 ++++++++++
1 file changed, 10 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/34ad8577/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
index 896e4de..2880e8f 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
@@ -28,6 +28,8 @@ import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.schema.SchemaRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.DocCollection;
@@ -81,6 +83,14 @@ public class TestBlobHandler extends AbstractFullDistribZkTestBase {
"requestHandler",
"/blob",
"class")));
+ map = TestSolrConfigHandlerConcurrent.getAsMap(baseUrl + "/.system/schema/fields/blob", cloudClient);
+ assertNotNull(map);
+ assertEquals("blob", getObjectByPath(map, true, Arrays.asList(
+ "field",
+ "name")));
+ assertEquals("bytes", getObjectByPath(map, true, Arrays.asList(
+ "field",
+ "type")));
byte[] bytarr = new byte[1024];
for (int i = 0; i < bytarr.length; i++) bytarr[i] = (byte) (i % 127);
[26/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-2039: Multivalued
fields with dynamic names does not work properly with DIH
Posted by kr...@apache.org.
SOLR-2039: Multivalued fields with dynamic names does not work properly with DIH
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b8d96473
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b8d96473
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b8d96473
Branch: refs/heads/jira/solr-8593
Commit: b8d9647307c5559706aeec3aad32c2e416188979
Parents: 62bc90d
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Wed Oct 26 11:02:10 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Wed Oct 26 11:02:10 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../solr/handler/dataimport/DocBuilder.java | 2 ++
.../handler/dataimport/TestDocBuilder2.java | 23 ++++++++++++++++++++
3 files changed, 28 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b8d96473/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 506ad9a..9dfed73 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -220,6 +220,9 @@ Bug Fixes
* SOLR-9536: OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
(Hrishikesh Gadre, hossman via Mark Miller)
+
+* SOLR-2039: Multivalued fields with dynamic names does not work properly with DIH.
+ (K A, ruslan.shv, Cao Manh Dat via shalin)
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b8d96473/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index f9ccfb6..c80d275 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -501,7 +501,9 @@ public class DocBuilder {
doc.addChildDocument(childDoc);
} else {
handleSpecialCommands(arow, doc);
+ vr.addNamespace(epw.getEntity().getName(), arow);
addFields(epw.getEntity(), doc, arow, vr);
+ vr.removeNamespace(epw.getEntity().getName());
}
}
if (epw.getEntity().getChildren() != null) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b8d96473/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java
index 40ee2b9..32a0d4a 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder2.java
@@ -117,6 +117,20 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
@Test
@SuppressWarnings("unchecked")
+ public void testDynamicFieldNames() throws Exception {
+ List rows = new ArrayList();
+ rows.add(createMap("mypk", "101", "text", "ApacheSolr"));
+ MockDataSource.setIterator("select * from x", rows.iterator());
+
+ LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
+ "debug", "on", "clean", "true", "commit", "true",
+ "dataConfig", dataConfigWithDynamicFieldNames);
+ h.query("/dataimport", request);
+ assertQ(req("id:101"), "//*[@numFound='1']", "//*[@name='101_s']");
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
public void testRequestParamsAsFieldName() throws Exception {
List rows = new ArrayList();
rows.add(createMap("mypk", "101", "text", "ApacheSolr"));
@@ -398,6 +412,15 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
" </document>\n" +
"</dataConfig>";
+ private final String dataConfigWithDynamicFieldNames = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
+ " <document>\n" +
+ " <entity name=\"books\" query=\"select * from x\">\n" +
+ " <field column=\"mypk\" name=\"id\" />\n" +
+ " <field column=\"text\" name=\"${books.mypk}_s\" />\n" +
+ " </entity>\n" +
+ " </document>\n" +
+ "</dataConfig>";
+
private final String dataConfigFileList = "<dataConfig>\n" +
"\t<document>\n" +
"\t\t<entity name=\"x\" processor=\"FileListEntityProcessor\" \n" +
[28/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9481: Add info-level
log "Initializing authentication plugin: " Move Sha256AuthProv
warning "No users configured yet" to debug level, as this is quite normal
Posted by kr...@apache.org.
SOLR-9481: Add info-level log "Initializing authentication plugin: <classname>"
Move Sha256AuthProv warning "No users configured yet" to debug level, as this is quite normal
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b69c5d9f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b69c5d9f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b69c5d9f
Branch: refs/heads/jira/solr-8593
Commit: b69c5d9f27aea722401674ed72b876da4dbdb7f4
Parents: 768c7e2
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 09:22:55 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 09:22:55 2016 +0200
----------------------------------------------------------------------
solr/core/src/java/org/apache/solr/core/CoreContainer.java | 1 +
.../org/apache/solr/security/Sha256AuthenticationProvider.java | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b69c5d9f/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index e3d577e..e641b2c 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -308,6 +308,7 @@ public class CoreContainer {
// Initialize the plugin
if (pluginClassName != null) {
+ log.info("Initializing authentication plugin: " + pluginClassName);
authenticationPlugin = new SecurityPluginHolder<>(readVersion(authenticationConfig),
getResourceLoader().newInstance(pluginClassName,
AuthenticationPlugin.class,
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b69c5d9f/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java b/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java
index 545792f..69664fd 100644
--- a/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java
+++ b/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java
@@ -64,7 +64,7 @@ public class Sha256AuthenticationProvider implements ConfigEditablePlugin, Basi
credentials = new LinkedHashMap<>();
Map<String,String> users = (Map<String,String>) pluginConfig.get("credentials");
if (users == null) {
- log.warn("No users configured yet");
+ log.debug("No users configured yet");
return;
}
for (Map.Entry<String, String> e : users.entrySet()) {
[49/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7429:
AnalyzerWrapper can now wrap the normalization analysis chain too.
Posted by kr...@apache.org.
LUCENE-7429: AnalyzerWrapper can now wrap the normalization analysis chain too.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/af600480
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/af600480
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/af600480
Branch: refs/heads/jira/solr-8593
Commit: af60048097a83220aae135b09d209a0f2d4ba3c6
Parents: 2172f3e
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Oct 27 16:27:45 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Oct 27 16:27:45 2016 +0200
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +
.../lucene/analysis/custom/CustomAnalyzer.java | 2 +-
.../lucene/collation/CollationKeyAnalyzer.java | 2 +-
.../org/apache/lucene/analysis/Analyzer.java | 9 +-
.../apache/lucene/analysis/AnalyzerWrapper.java | 50 ++++++++-
.../analysis/DelegatingAnalyzerWrapper.java | 14 ++-
.../analysis/TestDelegatingAnalyzerWrapper.java | 107 +++++++++++++++++++
.../lucene/analysis/MockBytesAnalyzer.java | 2 +-
.../apache/solr/analysis/TokenizerChain.java | 2 +-
9 files changed, 180 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index d574a8a..5a6601b 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -107,6 +107,9 @@ Bug Fixes
allTermsRequired is false and context filters are specified (Mike
McCandless)
+* LUCENE-7429: AnalyzerWrapper can now modify the normalization chain too and
+ DelegatingAnalyzerWrapper does the right thing automatically. (Adrien Grand)
+
Improvements
* LUCENE-7439: FuzzyQuery now matches all terms within the specified
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
index b2de5e8..466642c 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
@@ -131,7 +131,7 @@ public final class CustomAnalyzer extends Analyzer {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
- final Tokenizer tk = tokenizer.create(attributeFactory());
+ final Tokenizer tk = tokenizer.create(attributeFactory(fieldName));
TokenStream ts = tk;
for (final TokenFilterFactory filter : tokenFilters) {
ts = filter.create(ts);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
index ea98731..4d0f039 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
@@ -85,7 +85,7 @@ public final class CollationKeyAnalyzer extends Analyzer {
}
@Override
- protected AttributeFactory attributeFactory() {
+ protected AttributeFactory attributeFactory(String fieldName) {
return factory;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
index aa4b42d..3a5d41c 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
@@ -238,7 +238,7 @@ public abstract class Analyzer implements Closeable {
throw new IllegalStateException("Normalization threw an unexpected exeption", e);
}
- final AttributeFactory attributeFactory = attributeFactory();
+ final AttributeFactory attributeFactory = attributeFactory(fieldName);
try (TokenStream ts = normalize(fieldName,
new StringTokenStream(attributeFactory, filteredText, text.length()))) {
final TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
@@ -286,9 +286,10 @@ public abstract class Analyzer implements Closeable {
/** Return the {@link AttributeFactory} to be used for
* {@link #tokenStream analysis} and
- * {@link #normalize(String, String) normalization}. The default
- * implementation returns {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
- protected AttributeFactory attributeFactory() {
+ * {@link #normalize(String, String) normalization} on the given
+ * {@code FieldName}. The default implementation returns
+ * {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
+ protected AttributeFactory attributeFactory(String fieldName) {
return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java b/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
index 1e5640f..d23d004 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
@@ -19,6 +19,8 @@ package org.apache.lucene.analysis;
import java.io.Reader;
+import org.apache.lucene.util.AttributeFactory;
+
/**
* Extension to {@link Analyzer} suitable for Analyzers which wrap
* other Analyzers.
@@ -82,6 +84,22 @@ public abstract class AnalyzerWrapper extends Analyzer {
}
/**
+ * Wraps / alters the given TokenStream for normalization purposes, taken
+ * from the wrapped Analyzer, to form new components. It is through this
+ * method that new TokenFilters can be added by AnalyzerWrappers. By default,
+ * the given token stream are returned.
+ *
+ * @param fieldName
+ * Name of the field which is to be analyzed
+ * @param in
+ * TokenStream taken from the wrapped Analyzer
+ * @return Wrapped / altered TokenStreamComponents.
+ */
+ protected TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
+ return in;
+ }
+
+ /**
* Wraps / alters the given Reader. Through this method AnalyzerWrappers can
* implement {@link #initReader(String, Reader)}. By default, the given reader
* is returned.
@@ -95,13 +113,33 @@ public abstract class AnalyzerWrapper extends Analyzer {
protected Reader wrapReader(String fieldName, Reader reader) {
return reader;
}
-
+
+ /**
+ * Wraps / alters the given Reader. Through this method AnalyzerWrappers can
+ * implement {@link #initReaderForNormalization(String, Reader)}. By default,
+ * the given reader is returned.
+ *
+ * @param fieldName
+ * name of the field which is to be analyzed
+ * @param reader
+ * the reader to wrap
+ * @return the wrapped reader
+ */
+ protected Reader wrapReaderForNormalization(String fieldName, Reader reader) {
+ return reader;
+ }
+
@Override
protected final TokenStreamComponents createComponents(String fieldName) {
return wrapComponents(fieldName, getWrappedAnalyzer(fieldName).createComponents(fieldName));
}
@Override
+ protected final TokenStream normalize(String fieldName, TokenStream in) {
+ return wrapTokenStreamForNormalization(fieldName, getWrappedAnalyzer(fieldName).normalize(fieldName, in));
+ }
+
+ @Override
public int getPositionIncrementGap(String fieldName) {
return getWrappedAnalyzer(fieldName).getPositionIncrementGap(fieldName);
}
@@ -115,4 +153,14 @@ public abstract class AnalyzerWrapper extends Analyzer {
public final Reader initReader(String fieldName, Reader reader) {
return getWrappedAnalyzer(fieldName).initReader(fieldName, wrapReader(fieldName, reader));
}
+
+ @Override
+ protected final Reader initReaderForNormalization(String fieldName, Reader reader) {
+ return getWrappedAnalyzer(fieldName).initReaderForNormalization(fieldName, wrapReaderForNormalization(fieldName, reader));
+ }
+
+ @Override
+ protected final AttributeFactory attributeFactory(String fieldName) {
+ return getWrappedAnalyzer(fieldName).attributeFactory(fieldName);
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java b/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
index 6f05d4d..edf5b2b 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
@@ -54,12 +54,22 @@ public abstract class DelegatingAnalyzerWrapper extends AnalyzerWrapper {
protected final TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
return super.wrapComponents(fieldName, components);
}
-
+
+ @Override
+ protected final TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
+ return super.wrapTokenStreamForNormalization(fieldName, in);
+ }
+
@Override
protected final Reader wrapReader(String fieldName, Reader reader) {
return super.wrapReader(fieldName, reader);
}
-
+
+ @Override
+ protected final Reader wrapReaderForNormalization(String fieldName, Reader reader) {
+ return super.wrapReaderForNormalization(fieldName, reader);
+ }
+
private static final class DelegatingReuseStrategy extends ReuseStrategy {
DelegatingAnalyzerWrapper wrapper;
private final ReuseStrategy fallbackStrategy;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java b/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
new file mode 100644
index 0000000..1d6cf15
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.analysis;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestDelegatingAnalyzerWrapper extends LuceneTestCase {
+
+ public void testDelegatesNormalization() {
+ Analyzer analyzer1 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
+ DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+ @Override
+ protected Analyzer getWrappedAnalyzer(String fieldName) {
+ return analyzer1;
+ }
+ };
+ assertEquals(new BytesRef("Ab C"), w1.normalize("foo", "Ab C"));
+
+ Analyzer analyzer2 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true);
+ DelegatingAnalyzerWrapper w2 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+ @Override
+ protected Analyzer getWrappedAnalyzer(String fieldName) {
+ return analyzer2;
+ }
+ };
+ assertEquals(new BytesRef("ab c"), w2.normalize("foo", "Ab C"));
+ }
+
+ public void testDelegatesAttributeFactory() throws Exception {
+ Analyzer analyzer1 = new MockBytesAnalyzer();
+ DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+ @Override
+ protected Analyzer getWrappedAnalyzer(String fieldName) {
+ return analyzer1;
+ }
+ };
+ assertEquals(new BytesRef("Ab C".getBytes(StandardCharsets.UTF_16LE)), w1.normalize("foo", "Ab C"));
+ }
+
+ public void testDelegatesCharFilter() throws Exception {
+ Analyzer analyzer1 = new Analyzer() {
+ @Override
+ protected Reader initReaderForNormalization(String fieldName, Reader reader) {
+ return new DummyCharFilter(reader, 'b', 'z');
+ }
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ Tokenizer tokenizer = new MockTokenizer(attributeFactory(fieldName));
+ return new TokenStreamComponents(tokenizer);
+ }
+ };
+ DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+ @Override
+ protected Analyzer getWrappedAnalyzer(String fieldName) {
+ return analyzer1;
+ }
+ };
+ assertEquals(new BytesRef("az c"), w1.normalize("foo", "ab c"));
+ }
+
+ private static class DummyCharFilter extends CharFilter {
+
+ private final char match, repl;
+
+ public DummyCharFilter(Reader input, char match, char repl) {
+ super(input);
+ this.match = match;
+ this.repl = repl;
+ }
+
+ @Override
+ protected int correct(int currentOff) {
+ return currentOff;
+ }
+
+ @Override
+ public int read(char[] cbuf, int off, int len) throws IOException {
+ final int read = input.read(cbuf, off, len);
+ for (int i = 0; i < read; ++i) {
+ if (cbuf[off+i] == match) {
+ cbuf[off+i] = repl;
+ }
+ }
+ return read;
+ }
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
index b8cfc5b..4d51717 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
@@ -30,7 +30,7 @@ public final class MockBytesAnalyzer extends Analyzer {
}
@Override
- protected AttributeFactory attributeFactory() {
+ protected AttributeFactory attributeFactory(String fieldName) {
return MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
index a5afbec..ab5458c 100644
--- a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
+++ b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
@@ -99,7 +99,7 @@ public final class TokenizerChain extends SolrAnalyzer {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
- Tokenizer tk = tokenizer.create(attributeFactory());
+ Tokenizer tk = tokenizer.create(attributeFactory(fieldName));
TokenStream ts = tk;
for (TokenFilterFactory filter : filters) {
ts = filter.create(ts);
[35/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9670: Support
SOLR_AUTHENTICATION_OPTS in solr.cmd
Posted by kr...@apache.org.
SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/feb1a5d3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/feb1a5d3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/feb1a5d3
Branch: refs/heads/jira/solr-8593
Commit: feb1a5d3e78106160a5df440aa15884a16132b4a
Parents: db43bfb
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 15:17:13 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 15:17:13 2016 +0200
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
solr/bin/solr.cmd | 70 +++++++++++++++++++++++++++++++++--------------
solr/bin/solr.in.cmd | 4 +++
3 files changed, 55 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/feb1a5d3/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a952a5e..efacec4 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -166,6 +166,8 @@ New Features
* SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy. (Lance Norskog, Cao Manh Dat via shalin)
+* SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd (janhoy)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/feb1a5d3/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 4ab188f..262a6f8 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -56,6 +56,16 @@ IF DEFINED SOLR_SSL_KEY_STORE (
set SOLR_SSL_OPTS=
)
+REM Authentication options
+IF DEFINED SOLR_AUTHENTICATION_CLIENT_CONFIGURER (
+ echo WARNING: Found unsupported configuration variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER
+ echo Please start using SOLR_AUTHENTICATION_CLIENT_BUILDER instead
+)
+IF DEFINED SOLR_AUTHENTICATION_CLIENT_BUILDER (
+ set AUTHC_CLIENT_BUILDER_ARG="-Dsolr.authentication.httpclient.builder=%SOLR_AUTHENTICATION_CLIENT_BUILDER%"
+)
+set "AUTHC_OPTS=%AUTHC_CLIENT_BUILDER_ARG% %SOLR_AUTHENTICATION_OPTS%"
+
REM Set the SOLR_TOOL_HOST variable for use when connecting to a running Solr instance
IF NOT "%SOLR_HOST%"=="" (
set "SOLR_TOOL_HOST=%SOLR_HOST%"
@@ -1038,7 +1048,7 @@ IF "%FG%"=="1" (
echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
REM now wait to see Solr come online ...
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI status -maxWaitSecs 30 -solr !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:%SOLR_PORT%/solr
@@ -1049,9 +1059,11 @@ goto done
:run_example
REM Run the requested example
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
- org.apache.solr.util.SolrCLI run_example -script "%SDIR%\solr.cmd" -e %EXAMPLE% -d "%SOLR_SERVER_DIR%" -urlScheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
+ org.apache.solr.util.SolrCLI run_example -script "%SDIR%\solr.cmd" -e %EXAMPLE% -d "%SOLR_SERVER_DIR%" ^
+ -urlScheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
REM End of run_example
goto done
@@ -1069,7 +1081,8 @@ for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port
@echo.
set has_info=1
echo Found Solr process %%k running on port !SOME_SOLR_PORT!
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI status -solr !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!SOME_SOLR_PORT!/solr
@echo.
@@ -1108,13 +1121,15 @@ goto parse_healthcheck_args
:run_healthcheck
IF NOT DEFINED HEALTHCHECK_COLLECTION goto healthcheck_usage
IF NOT DEFINED HEALTHCHECK_ZK_HOST set "HEALTHCHECK_ZK_HOST=localhost:9983"
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI healthcheck -collection !HEALTHCHECK_COLLECTION! -zkHost !HEALTHCHECK_ZK_HOST!
goto done
:run_assert
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI %*
if errorlevel 1 (
@@ -1123,14 +1138,16 @@ if errorlevel 1 (
goto done
:get_version
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI version
goto done
:run_utils
set "TOOL_CMD=%~1"
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI utils -s "%DEFAULT_SERVER_DIR%" -l "%SOLR_LOGS_DIR%" %TOOL_CMD%
if errorlevel 1 (
@@ -1222,15 +1239,18 @@ if "!CREATE_PORT!"=="" (
)
if "%SCRIPT_CMD%"=="create_core" (
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI create_core -name !CREATE_NAME! -solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr ^
-confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets"
) else (
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
- -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
- org.apache.solr.util.SolrCLI create -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^
- -confname !CREATE_CONFNAME! -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets" -solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
+ org.apache.solr.util.SolrCLI create -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^
+ -confname !CREATE_CONFNAME! -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets" ^
+ -solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr
)
goto done
@@ -1293,7 +1313,8 @@ if "!DELETE_CONFIG!"=="" (
set DELETE_CONFIG=true
)
-"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI delete -name !DELETE_NAME! -deleteConfig !DELETE_CONFIG! ^
-solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!DELETE_PORT!/solr
@@ -1420,9 +1441,11 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="The -d option must be set for upconfig."
goto zk_short_usage
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
- org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST! -configsetsDir "%SOLR_TIP%/server/solr/configsets"
+ org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST! ^
+ -configsetsDir "%SOLR_TIP%/server/solr/configsets"
) ELSE IF "!ZK_OP!"=="downconfig" (
IF "!CONFIGSET_NAME!"=="" (
set ERROR_MSG="-n option must be set for downconfig"
@@ -1432,7 +1455,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="The -d option must be set for downconfig."
goto zk_short_usage
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST!
) ELSE IF "!ZK_OP!"=="cp" (
@@ -1450,7 +1474,8 @@ IF "!ZK_OP!"=="upconfig" (
goto zk_short_usage
)
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -src !ZK_SRC! -dst !ZK_DST! -recurse !ZK_RECURSE!
) ELSE IF "!ZK_OP!"=="mv" (
@@ -1462,7 +1487,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="<dest> must be specified for 'mv' command"
goto zk_short_usage
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -src !ZK_SRC! -dst !ZK_DST!
) ELSE IF "!ZK_OP!"=="rm" (
@@ -1470,7 +1496,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="Zookeeper path to remove must be specified when using the 'rm' command"
goto zk_short_usage
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -path !ZK_SRC! -recurse !ZK_RECURSE!
) ELSE IF "!ZK_OP!"=="ls" (
@@ -1478,7 +1505,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="Zookeeper path to remove must be specified when using the 'rm' command"
goto zk_short_usage
)
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -path !ZK_SRC! -recurse !ZK_RECURSE!
) ELSE (
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/feb1a5d3/solr/bin/solr.in.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd
index 06da233..14f35e3 100644
--- a/solr/bin/solr.in.cmd
+++ b/solr/bin/solr.in.cmd
@@ -98,6 +98,10 @@ REM set SOLR_SSL_CLIENT_KEY_STORE_PASSWORD=
REM set SOLR_SSL_CLIENT_TRUST_STORE=
REM set SOLR_SSL_CLIENT_TRUST_STORE_PASSWORD=
+REM Settings for authentication
+REM set SOLR_AUTHENTICATION_CLIENT_BUILDER=
+REM set SOLR_AUTHENTICATION_OPTS="-Dbasicauth=solr:SolrRocks"
+
REM Settings for ZK ACL
REM set SOLR_ZK_CREDS_AND_ACLS=-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider ^
REM -DzkCredentialsProvider=org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider ^
[06/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9255: Rename
SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER
Posted by kr...@apache.org.
SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/61e180b7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/61e180b7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/61e180b7
Branch: refs/heads/jira/solr-8593
Commit: 61e180b7efa965edd4979b15ee56d946d50f8221
Parents: c9de11d
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 24 14:18:21 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 24 14:18:21 2016 +0200
----------------------------------------------------------------------
solr/CHANGES.txt | 5 ++++-
solr/bin/solr | 8 ++++++--
solr/bin/solr.in.sh | 4 ++--
solr/core/src/java/org/apache/solr/util/SolrCLI.java | 4 ++--
4 files changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f455002..04d4d77 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -25,7 +25,8 @@ Upgrading from Solr 6.x
SolrHttpClientBuilder rather than an HttpClientConfigurer.
* HttpClientUtil now allows configuring HttpClient instances via SolrHttpClientBuilder
- rather than an HttpClientConfigurer.
+ rather than an HttpClientConfigurer. Use of env variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER
+ no longer works, please use SOLR_AUTHENTICATION_CLIENT_BUILDER
* SolrClient implementations now use their own internal configuration for socket timeouts,
connect timeouts, and allowing redirects rather than what is set as the default when
@@ -56,6 +57,8 @@ Optimizations
check on every request and move connection lifecycle management towards the client.
(Ryan Zezeski, Mark Miller, Shawn Heisey, Steve Davids)
+* SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER (janhoy)
+
* SOLR-9579: Make Solr's SchemaField implement Lucene's IndexableFieldType, removing the
creation of a Lucene FieldType every time a field is indexed. (John Call, yonik)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index 6aa5709..d2936de 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -178,9 +178,13 @@ fi
# Authentication options
if [ "$SOLR_AUTHENTICATION_CLIENT_CONFIGURER" != "" ]; then
- AUTHC_CLIENT_CONFIGURER_ARG="-Dsolr.authentication.httpclient.configurer=$SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
+ echo "WARNING: Found unsupported configuration variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
+ echo " Please start using SOLR_AUTHENTICATION_CLIENT_BUILDER instead"
fi
-AUTHC_OPTS="$AUTHC_CLIENT_CONFIGURER_ARG $SOLR_AUTHENTICATION_OPTS"
+if [ "$SOLR_AUTHENTICATION_CLIENT_BUILDER" != "" ]; then
+ AUTHC_CLIENT_BUILDER_ARG="-Dsolr.authentication.httpclient.builder=$SOLR_AUTHENTICATION_CLIENT_BUILDER"
+fi
+AUTHC_OPTS="$AUTHC_CLIENT_BUILDER_ARG $SOLR_AUTHENTICATION_OPTS"
# Set the SOLR_TOOL_HOST variable for use when connecting to a running Solr instance
if [ "$SOLR_HOST" != "" ]; then
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/bin/solr.in.sh
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 2fcaabb..40c59a6 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -105,8 +105,8 @@
#SOLR_SSL_CLIENT_TRUST_STORE_PASSWORD=
# Settings for authentication
-#SOLR_AUTHENTICATION_CLIENT_CONFIGURER=
-#SOLR_AUTHENTICATION_OPTS=
+#SOLR_AUTHENTICATION_CLIENT_BUILDER=
+#SOLR_AUTHENTICATION_OPTS="-Dbasicauth=solr:SolrRocks"
# Settings for ZK ACL
#SOLR_ZK_CREDS_AND_ACLS="-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider \
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 8180c44..76e5ee9 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -281,10 +281,10 @@ public class SolrCLI {
Class c = Class.forName(builderClassName);
SolrHttpClientBuilder builder = (SolrHttpClientBuilder)c.newInstance();
HttpClientUtil.setHttpClientBuilder(builder);
- log.info("Set HttpClientConfigurer from: "+builderClassName);
+ log.info("Set SolrHttpClientBuilder from: "+builderClassName);
} catch (Exception ex) {
log.error(ex.getMessage());
- throw new RuntimeException("Error during loading of configurer '"+builderClassName+"'.", ex);
+ throw new RuntimeException("Error during loading of builder '"+builderClassName+"'.", ex);
}
}
[33/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7489: Remove one
layer of abstraction in binary doc values and single-valued numerics.
Posted by kr...@apache.org.
LUCENE-7489: Remove one layer of abstraction in binary doc values and single-valued numerics.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/643429de
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/643429de
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/643429de
Branch: refs/heads/jira/solr-8593
Commit: 643429de6e162fd85d5100137d01ee29e4bb614a
Parents: 71c6518
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Oct 26 14:07:43 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Oct 26 14:07:43 2016 +0200
----------------------------------------------------------------------
.../lucene70/Lucene70DocValuesProducer.java | 419 +++++++++++--------
1 file changed, 256 insertions(+), 163 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/643429de/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
index 19815ba..3f3e73f 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
@@ -340,88 +340,147 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return getNumeric(entry);
}
- private NumericDocValues getNumeric(NumericEntry entry) throws IOException {
- if (entry.docsWithFieldOffset == -2) {
- // empty
- return DocValues.emptyNumeric();
- } else if (entry.docsWithFieldOffset == -1) {
- // dense
- final LongValues normValues = getNumericValues(entry);
- return new NumericDocValues() {
+ private static abstract class DenseNumericDocValues extends NumericDocValues {
- int doc = -1;
+ final int maxDoc;
+ int doc = -1;
- @Override
- public long longValue() throws IOException {
- return normValues.get(doc);
- }
+ DenseNumericDocValues(int maxDoc) {
+ this.maxDoc = maxDoc;
+ }
- @Override
- public int docID() {
- return doc;
- }
+ @Override
+ public int docID() {
+ return doc;
+ }
- @Override
- public int nextDoc() throws IOException {
- return advance(doc + 1);
- }
+ @Override
+ public int nextDoc() throws IOException {
+ return advance(doc + 1);
+ }
- @Override
- public int advance(int target) throws IOException {
- if (target >= maxDoc) {
- return doc = NO_MORE_DOCS;
- }
- return doc = target;
- }
+ @Override
+ public int advance(int target) throws IOException {
+ if (target >= maxDoc) {
+ return doc = NO_MORE_DOCS;
+ }
+ return doc = target;
+ }
- @Override
- public boolean advanceExact(int target) {
- doc = target;
- return true;
- }
+ @Override
+ public boolean advanceExact(int target) {
+ doc = target;
+ return true;
+ }
- @Override
- public long cost() {
- return maxDoc;
- }
+ @Override
+ public long cost() {
+ return maxDoc;
+ }
- };
- } else {
- // sparse
- final LongValues values = getNumericValues(entry);
- final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues);
- return new NumericDocValues() {
+ }
- @Override
- public int advance(int target) throws IOException {
- return disi.advance(target);
- }
+ private static abstract class SparseNumericDocValues extends NumericDocValues {
- @Override
- public boolean advanceExact(int target) throws IOException {
- return disi.advanceExact(target);
- }
+ final IndexedDISI disi;
- @Override
- public int nextDoc() throws IOException {
- return disi.nextDoc();
- }
+ SparseNumericDocValues(IndexedDISI disi) {
+ this.disi = disi;
+ }
- @Override
- public int docID() {
- return disi.docID();
- }
+ @Override
+ public int advance(int target) throws IOException {
+ return disi.advance(target);
+ }
- @Override
- public long cost() {
- return disi.cost();
- }
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
- @Override
- public long longValue() throws IOException {
- return values.get(disi.index());
+ @Override
+ public int nextDoc() throws IOException {
+ return disi.nextDoc();
+ }
+
+ @Override
+ public int docID() {
+ return disi.docID();
+ }
+
+ @Override
+ public long cost() {
+ return disi.cost();
+ }
+ }
+
+ private NumericDocValues getNumeric(NumericEntry entry) throws IOException {
+ if (entry.docsWithFieldOffset == -2) {
+ // empty
+ return DocValues.emptyNumeric();
+ } else if (entry.docsWithFieldOffset == -1) {
+ // dense
+ if (entry.bitsPerValue == 0) {
+ return new DenseNumericDocValues(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return entry.minValue;
+ }
+ };
+ } else {
+ final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength);
+ final LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
+ if (entry.table != null) {
+ final long[] table = entry.table;
+ return new DenseNumericDocValues(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return table[(int) values.get(doc)];
+ }
+ };
+ } else {
+ final long mul = entry.gcd;
+ final long delta = entry.minValue;
+ return new DenseNumericDocValues(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return mul * values.get(doc) + delta;
+ }
+ };
}
- };
+ }
+ } else {
+ // sparse
+ final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues);
+ if (entry.bitsPerValue == 0) {
+ return new SparseNumericDocValues(disi) {
+ @Override
+ public long longValue() throws IOException {
+ return entry.minValue;
+ }
+ };
+ } else {
+ final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength);
+ final LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
+ if (entry.table != null) {
+ final long[] table = entry.table;
+ return new SparseNumericDocValues(disi) {
+ @Override
+ public long longValue() throws IOException {
+ return table[(int) values.get(disi.index())];
+ }
+ };
+ } else {
+ final long mul = entry.gcd;
+ final long delta = entry.minValue;
+ return new SparseNumericDocValues(disi) {
+ @Override
+ public long longValue() throws IOException {
+ return mul * values.get(disi.index()) + delta;
+ }
+ };
+ }
+ }
}
}
@@ -467,6 +526,79 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
}
+ private static abstract class DenseBinaryDocValues extends BinaryDocValues {
+
+ final int maxDoc;
+ int doc = -1;
+
+ DenseBinaryDocValues(int maxDoc) {
+ this.maxDoc = maxDoc;
+ }
+
+ @Override
+ public int nextDoc() throws IOException {
+ return advance(doc + 1);
+ }
+
+ @Override
+ public int docID() {
+ return doc;
+ }
+
+ @Override
+ public long cost() {
+ return maxDoc;
+ }
+
+ @Override
+ public int advance(int target) throws IOException {
+ if (target >= maxDoc) {
+ return doc = NO_MORE_DOCS;
+ }
+ return doc = target;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return true;
+ }
+ }
+
+ private static abstract class SparseBinaryDocValues extends BinaryDocValues {
+
+ final IndexedDISI disi;
+
+ SparseBinaryDocValues(IndexedDISI disi) {
+ this.disi = disi;
+ }
+
+ @Override
+ public int nextDoc() throws IOException {
+ return disi.nextDoc();
+ }
+
+ @Override
+ public int docID() {
+ return disi.docID();
+ }
+
+ @Override
+ public long cost() {
+ return disi.cost();
+ }
+
+ @Override
+ public int advance(int target) throws IOException {
+ return disi.advance(target);
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+ }
+
@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryEntry entry = binaries.get(field.name);
@@ -474,116 +606,77 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return DocValues.emptyBinary();
}
- IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength);
- BytesRefs bytesRefs;
- if (entry.minLength == entry.maxLength) {
- bytesRefs = new BytesRefs() {
- BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
- @Override
- public BytesRef get(int index) throws IOException {
- bytesSlice.seek((long) index * bytes.length);
- bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
- return bytes;
- }
- };
- } else {
- final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
- final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
- bytesRefs = new BytesRefs() {
- BytesRef bytes = new BytesRef(entry.maxLength);
- @Override
- BytesRef get(int index) throws IOException {
- long startOffset = addresses.get(index);
- bytes.length = (int) (addresses.get(index + 1L) - startOffset);
- bytesSlice.seek(startOffset);
- bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
- return bytes;
- }
- };
- }
+ final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength);
if (entry.docsWithFieldOffset == -1) {
// dense
- return new BinaryDocValues() {
-
- int doc = -1;
+ if (entry.minLength == entry.maxLength) {
+ // fixed length
+ final int length = entry.maxLength;
+ return new DenseBinaryDocValues(maxDoc) {
+ final BytesRef bytes = new BytesRef(new byte[length], 0, length);
- @Override
- public int nextDoc() throws IOException {
- return advance(doc + 1);
- }
-
- @Override
- public int docID() {
- return doc;
- }
-
- @Override
- public long cost() {
- return maxDoc;
- }
-
- @Override
- public int advance(int target) throws IOException {
- if (target >= maxDoc) {
- return doc = NO_MORE_DOCS;
+ @Override
+ public BytesRef binaryValue() throws IOException {
+ bytesSlice.seek((long) doc * length);
+ bytesSlice.readBytes(bytes.bytes, 0, length);
+ return bytes;
}
- return doc = target;
- }
-
- @Override
- public boolean advanceExact(int target) throws IOException {
- doc = target;
- return true;
- }
+ };
+ } else {
+ // variable length
+ final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
+ final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
+ return new DenseBinaryDocValues(maxDoc) {
+ final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
- @Override
- public BytesRef binaryValue() throws IOException {
- return bytesRefs.get(doc);
- }
- };
+ @Override
+ public BytesRef binaryValue() throws IOException {
+ long startOffset = addresses.get(doc);
+ bytes.length = (int) (addresses.get(doc + 1L) - startOffset);
+ bytesSlice.seek(startOffset);
+ bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
+ return bytes;
+ }
+ };
+ }
} else {
// sparse
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField);
- return new BinaryDocValues() {
-
- @Override
- public int nextDoc() throws IOException {
- return disi.nextDoc();
- }
-
- @Override
- public int docID() {
- return disi.docID();
- }
-
- @Override
- public long cost() {
- return disi.cost();
- }
+ if (entry.minLength == entry.maxLength) {
+ // fixed length
+ final int length = entry.maxLength;
+ return new SparseBinaryDocValues(disi) {
+ final BytesRef bytes = new BytesRef(new byte[length], 0, length);
- @Override
- public int advance(int target) throws IOException {
- return disi.advance(target);
- }
-
- @Override
- public boolean advanceExact(int target) throws IOException {
- return disi.advanceExact(target);
- }
+ @Override
+ public BytesRef binaryValue() throws IOException {
+ bytesSlice.seek((long) disi.index() * length);
+ bytesSlice.readBytes(bytes.bytes, 0, length);
+ return bytes;
+ }
+ };
+ } else {
+ // variable length
+ final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
+ final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
+ return new SparseBinaryDocValues(disi) {
+ final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
- @Override
- public BytesRef binaryValue() throws IOException {
- return bytesRefs.get(disi.index());
- }
- };
+ @Override
+ public BytesRef binaryValue() throws IOException {
+ final int index = disi.index();
+ long startOffset = addresses.get(index);
+ bytes.length = (int) (addresses.get(index + 1L) - startOffset);
+ bytesSlice.seek(startOffset);
+ bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
+ return bytes;
+ }
+ };
+ }
}
}
- private static abstract class BytesRefs {
- abstract BytesRef get(int index) throws IOException;
- }
-
@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
SortedEntry entry = sorted.get(field.name);
[22/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-7604: add testcase
to verify the schema of .system collection
Posted by kr...@apache.org.
SOLR-7604: add testcase to verify the schema of .system collection
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/93031129
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/93031129
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/93031129
Branch: refs/heads/jira/solr-8593
Commit: 9303112981527640f24968fb811c9ff71e1ae830
Parents: a916877
Author: Noble Paul <no...@apache.org>
Authored: Wed Oct 26 02:04:58 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Oct 26 02:04:58 2016 +0530
----------------------------------------------------------------------
.../apache/solr/handler/TestBlobHandler.java | 20 +++++++++-----------
1 file changed, 9 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/93031129/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
index 2880e8f..4fda926 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
@@ -16,6 +16,15 @@
*/
package org.apache.solr.handler;
+import java.io.IOException;
+import java.io.StringReader;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
@@ -28,8 +37,6 @@ import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.request.GenericSolrRequest;
-import org.apache.solr.client.solrj.request.schema.SchemaRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.DocCollection;
@@ -45,15 +52,6 @@ import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.io.StringReader;
-import java.lang.invoke.MethodHandles;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
import static org.apache.solr.common.util.Utils.getObjectByPath;
public class TestBlobHandler extends AbstractFullDistribZkTestBase {
[25/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-5245: Add a test to
ensure that election contexts are keyed off both collection name and
coreNodeName so that killing a shard in one collection does not result in
leader election in a different collection.
Posted by kr...@apache.org.
SOLR-5245: Add a test to ensure that election contexts are keyed off both collection name and coreNodeName so that killing a shard in one collection does not result in leader election in a different collection.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/62bc90d7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/62bc90d7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/62bc90d7
Branch: refs/heads/jira/solr-8593
Commit: 62bc90d7d2d586fd587c7a133fff83e535892764
Parents: d25a618
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Wed Oct 26 09:49:47 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Wed Oct 26 09:49:47 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +
.../cloud/LeaderElectionContextKeyTest.java | 114 +++++++++++++++++++
2 files changed, 118 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/62bc90d7/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 62c9d4a..506ad9a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -354,6 +354,10 @@ Other Changes
* SOLR-4531: Add tests to ensure that recovery does not fail on corrupted tlogs.
(Simon Scofield, Cao Manh Dat via shalin)
+* SOLR-5245: Add a test to ensure that election contexts are keyed off both collection name and coreNodeName
+ so that killing a shard in one collection does not result in leader election in a different collection.
+ See SOLR-5243 for the related bug. (Cao Manh Dat via shalin)
+
================== 6.2.1 ==================
Bug Fixes
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/62bc90d7/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java
new file mode 100644
index 0000000..728ea1b
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.zookeeper.KeeperException;
+import org.hamcrest.CoreMatchers;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class LeaderElectionContextKeyTest extends SolrCloudTestCase {
+
+ private static final String TEST_COLLECTION_1 = "testCollection1";
+ private static final String TEST_COLLECTION_2 = "testCollection2";
+
+ @BeforeClass
+ public static void setupCluster() throws Exception {
+ configureCluster(1)
+ .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+ .configure();
+
+ CollectionAdminRequest
+ .createCollection("testCollection1", "config", 2, 1)
+ .setMaxShardsPerNode(1000)
+ .process(cluster.getSolrClient());
+ CollectionAdminRequest
+ .createCollection("testCollection2", "config", 2, 1)
+ .setMaxShardsPerNode(1000)
+ .process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("testCollection1", cluster.getSolrClient().getZkStateReader(),
+ false, true, 30);
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("testCollection2", cluster.getSolrClient().getZkStateReader(),
+ false, true, 30);
+ }
+
+ @Test
+ public void test() throws KeeperException, InterruptedException, IOException, SolrServerException {
+ ZkStateReader stateReader = cluster.getSolrClient().getZkStateReader();
+ stateReader.forceUpdateCollection(TEST_COLLECTION_1);
+ List<Replica> replicasOfCollection1 = stateReader.getClusterState().getCollection(TEST_COLLECTION_1).getReplicas();
+ List<Replica> replicasOfCollection2 = stateReader.getClusterState().getCollection(TEST_COLLECTION_2).getReplicas();
+ Replica replica = findLeaderReplicaWithDuplicatedName(replicasOfCollection1, replicasOfCollection2);
+ assertNotNull(replica);
+
+ SolrClient shardLeaderClient = new HttpSolrClient.Builder(replica.get("base_url").toString()).build();
+ try {
+ assertEquals(1L, getElectionNodes(TEST_COLLECTION_1, "shard1", stateReader.getZkClient()).size());
+ List<String> collection2Shard1Nodes = getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient());
+ List<String> collection2Shard2Nodes = getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient());
+ CoreAdminRequest.unloadCore(replica.getCoreName(), shardLeaderClient);
+ // Waiting for leader election being kicked off
+ long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS);
+ boolean found = false;
+ while (System.nanoTime() < timeout) {
+ try {
+ found = getElectionNodes(TEST_COLLECTION_1, "shard1", stateReader.getZkClient()).size() == 0;
+ break;
+ } catch (KeeperException.NoNodeException nne) {
+ // ignore
+ }
+ }
+ assertTrue(found);
+ // There are no leader election was kicked off on testCollection2
+ assertThat(collection2Shard1Nodes, CoreMatchers.is(getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient())));
+ assertThat(collection2Shard2Nodes, CoreMatchers.is(getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient())));
+ } finally {
+ shardLeaderClient.close();
+ }
+ }
+
+ private Replica findLeaderReplicaWithDuplicatedName(List<Replica> replicas1, List<Replica> replicas2) {
+ for (Replica replica1 : replicas1) {
+ if (!replica1.containsKey("leader")) continue;
+ for (Replica replica2 : replicas2) {
+ if (replica1.getName().equals(replica2.getName())
+ && replica1.get("base_url").equals(replica2.get("base_url"))
+ && replica2.containsKey("leader")) {
+ return replica1;
+ }
+ }
+ }
+ return null;
+ }
+
+ private List<String> getElectionNodes(String collection, String shard, SolrZkClient client) throws KeeperException, InterruptedException {
+ return client.getChildren("/collections/"+collection+"/leader_elect/"+shard+LeaderElector.ELECTION_NODE, null, true);
+ }
+}
[32/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7462: Fix buggy
advanceExact impl of empty binary doc values.
Posted by kr...@apache.org.
LUCENE-7462: Fix buggy advanceExact impl of empty binary doc values.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/71c65184
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/71c65184
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/71c65184
Branch: refs/heads/jira/solr-8593
Commit: 71c65184562499eba365d166fe3fabe0dbdc747b
Parents: 5394d29
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Oct 26 11:36:23 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Oct 26 11:36:23 2016 +0200
----------------------------------------------------------------------
lucene/core/src/java/org/apache/lucene/index/DocValues.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71c65184/lucene/core/src/java/org/apache/lucene/index/DocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValues.java b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
index b25d484..700ae58 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
@@ -45,7 +45,7 @@ public final class DocValues {
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
- return true;
+ return false;
}
@Override
[31/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7475: Remove one
layer of abstraction in the Lucene70 norms impl.
Posted by kr...@apache.org.
LUCENE-7475: Remove one layer of abstraction in the Lucene70 norms impl.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5394d29f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5394d29f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5394d29f
Branch: refs/heads/jira/solr-8593
Commit: 5394d29fca8546936dc8227f23c6561d6b386832
Parents: ecfbe51
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Oct 26 10:39:01 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Oct 26 10:39:01 2016 +0200
----------------------------------------------------------------------
.../codecs/lucene70/Lucene70NormsProducer.java | 231 +++++++++++--------
1 file changed, 133 insertions(+), 98 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5394d29f/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
index c97f1c3..eb7c41a 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
@@ -95,8 +95,78 @@ final class Lucene70NormsProducer extends NormsProducer {
long normsOffset;
}
- static abstract class LongValues {
- abstract long get(int index) throws IOException;
+ static abstract class DenseNormsIterator extends NumericDocValues {
+
+ final int maxDoc;
+ int doc = -1;
+
+ DenseNormsIterator(int maxDoc) {
+ this.maxDoc = maxDoc;
+ }
+
+ @Override
+ public int docID() {
+ return doc;
+ }
+
+ @Override
+ public int nextDoc() throws IOException {
+ return advance(doc + 1);
+ }
+
+ @Override
+ public int advance(int target) throws IOException {
+ if (target >= maxDoc) {
+ return doc = NO_MORE_DOCS;
+ }
+ return doc = target;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ this.doc = target;
+ return true;
+ }
+
+ @Override
+ public long cost() {
+ return maxDoc;
+ }
+
+ }
+
+ static abstract class SparseNormsIterator extends NumericDocValues {
+
+ final IndexedDISI disi;
+
+ SparseNormsIterator(IndexedDISI disi) {
+ this.disi = disi;
+ }
+
+ @Override
+ public int docID() {
+ return disi.docID();
+ }
+
+ @Override
+ public int nextDoc() throws IOException {
+ return disi.nextDoc();
+ }
+
+ @Override
+ public int advance(int target) throws IOException {
+ return disi.advance(target);
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+
+ @Override
+ public long cost() {
+ return disi.cost();
+ }
}
private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
@@ -131,122 +201,87 @@ final class Lucene70NormsProducer extends NormsProducer {
return DocValues.emptyNumeric();
} else if (entry.docsWithFieldOffset == -1) {
// dense
- final LongValues normValues = getNormValues(entry);
- return new NumericDocValues() {
-
- int doc = -1;
-
- @Override
- public long longValue() throws IOException {
- return normValues.get(doc);
- }
-
- @Override
- public int docID() {
- return doc;
- }
-
- @Override
- public int nextDoc() throws IOException {
- return advance(doc + 1);
- }
-
- @Override
- public int advance(int target) throws IOException {
- if (target >= maxDoc) {
- return doc = NO_MORE_DOCS;
+ if (entry.bytesPerNorm == 0) {
+ return new DenseNormsIterator(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return entry.normsOffset;
}
- return doc = target;
- }
-
- @Override
- public boolean advanceExact(int target) throws IOException {
- this.doc = target;
- return true;
- }
-
- @Override
- public long cost() {
- return maxDoc;
- }
-
- };
+ };
+ }
+ final RandomAccessInput slice = data.randomAccessSlice(entry.normsOffset, entry.numDocsWithField * (long) entry.bytesPerNorm);
+ switch (entry.bytesPerNorm) {
+ case 1:
+ return new DenseNormsIterator(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return slice.readByte(doc);
+ }
+ };
+ case 2:
+ return new DenseNormsIterator(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return slice.readShort(((long) doc) << 1);
+ }
+ };
+ case 4:
+ return new DenseNormsIterator(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return slice.readInt(((long) doc) << 2);
+ }
+ };
+ case 8:
+ return new DenseNormsIterator(maxDoc) {
+ @Override
+ public long longValue() throws IOException {
+ return slice.readLong(((long) doc) << 3);
+ }
+ };
+ default:
+ // should not happen, we already validate bytesPerNorm in readFields
+ throw new AssertionError();
+ }
} else {
// sparse
- final LongValues normValues = getNormValues(entry);
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField);
- return new NumericDocValues() {
-
- @Override
- public int advance(int target) throws IOException {
- return disi.advance(target);
- }
-
- @Override
- public boolean advanceExact(int target) throws IOException {
- return disi.advanceExact(target);
- }
-
- @Override
- public int nextDoc() throws IOException {
- return disi.nextDoc();
- }
-
- @Override
- public int docID() {
- return disi.docID();
- }
-
- @Override
- public long cost() {
- return entry.numDocsWithField;
- }
-
- @Override
- public long longValue() throws IOException {
- return normValues.get(disi.index());
- }
- };
- }
- }
-
- private LongValues getNormValues(NormsEntry entry) throws IOException {
- if (entry.bytesPerNorm == 0) {
- return new LongValues() {
- @Override
- long get(int index) {
- return entry.normsOffset;
- }
- };
- } else {
+ if (entry.bytesPerNorm == 0) {
+ return new SparseNormsIterator(disi) {
+ @Override
+ public long longValue() throws IOException {
+ return entry.normsOffset;
+ }
+ };
+ }
final RandomAccessInput slice = data.randomAccessSlice(entry.normsOffset, entry.numDocsWithField * (long) entry.bytesPerNorm);
switch (entry.bytesPerNorm) {
case 1:
- return new LongValues() {
+ return new SparseNormsIterator(disi) {
@Override
- long get(int index) throws IOException {
- return slice.readByte(index);
+ public long longValue() throws IOException {
+ return slice.readByte(disi.index());
}
};
case 2:
- return new LongValues() {
+ return new SparseNormsIterator(disi) {
@Override
- long get(int index) throws IOException {
- return slice.readShort(((long) index) << 1);
+ public long longValue() throws IOException {
+ return slice.readShort(((long) disi.index()) << 1);
}
};
case 4:
- return new LongValues() {
+ return new SparseNormsIterator(disi) {
@Override
- long get(int index) throws IOException {
- return slice.readInt(((long) index) << 2);
+ public long longValue() throws IOException {
+ return slice.readInt(((long) disi.index()) << 2);
}
};
case 8:
- return new LongValues() {
+ return new SparseNormsIterator(disi) {
@Override
- long get(int index) throws IOException {
- return slice.readLong(((long) index) << 3);
+ public long longValue() throws IOException {
+ return slice.readLong(((long) disi.index()) << 3);
}
};
default:
[24/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9481: Authentication
and Authorization plugins now work in standalone mode, including edit API
Posted by kr...@apache.org.
SOLR-9481: Authentication and Authorization plugins now work in standalone mode, including edit API
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d25a6181
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d25a6181
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d25a6181
Branch: refs/heads/jira/solr-8593
Commit: d25a6181612fa00a8e5a1c1e6d889b6d21053486
Parents: 941c5e9
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 00:37:11 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 01:04:40 2016 +0200
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +
.../org/apache/solr/core/CoreContainer.java | 31 +--
.../solr/handler/admin/SecurityConfHandler.java | 142 ++++++++----
.../handler/admin/SecurityConfHandlerLocal.java | 102 +++++++++
.../handler/admin/SecurityConfHandlerZk.java | 92 ++++++++
.../apache/solr/security/BasicAuthPlugin.java | 14 +-
.../SecurityConfHandlerLocalForTesting.java | 43 ++++
.../handler/admin/SecurityConfHandlerTest.java | 66 +++---
.../solr/security/BasicAuthIntegrationTest.java | 4 +-
.../solr/security/BasicAuthStandaloneTest.java | 220 +++++++++++++++++++
10 files changed, 624 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 8e6ee7e..62c9d4a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -160,6 +160,10 @@ New Features
* SOLR-9654: Add "overrequest" parameter to JSON Facet API to control amount of overrequest
on a distributed terms facet. (yonik)
+* SOLR-9481: Authentication and Authorization plugins now work in standalone mode if security.json is placed in
+ SOLR_HOME on every node. Editing config through API is supported but affects only that one node.
+ (janhoy)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index a020b00..e3d577e 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -48,7 +48,6 @@ import org.apache.solr.cloud.Overseer;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.Utils;
@@ -60,6 +59,8 @@ import org.apache.solr.handler.admin.ConfigSetsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.apache.solr.handler.admin.SecurityConfHandler;
+import org.apache.solr.handler.admin.SecurityConfHandlerLocal;
+import org.apache.solr.handler.admin.SecurityConfHandlerZk;
import org.apache.solr.handler.admin.ZookeeperInfoHandler;
import org.apache.solr.handler.component.ShardHandlerFactory;
import org.apache.solr.logging.LogWatcher;
@@ -78,7 +79,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkNotNull;
-import static java.util.Collections.EMPTY_MAP;
import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
@@ -88,7 +88,6 @@ import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
import static org.apache.solr.common.params.CommonParams.ZK_PATH;
import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
-
/**
*
* @since solr 1.3
@@ -296,10 +295,10 @@ public class CoreContainer {
}
if (pluginClassName != null) {
- log.info("Authentication plugin class obtained from ZK: "+pluginClassName);
+ log.debug("Authentication plugin class obtained from security.json: "+pluginClassName);
} else if (System.getProperty(AUTHENTICATION_PLUGIN_PROP) != null) {
pluginClassName = System.getProperty(AUTHENTICATION_PLUGIN_PROP);
- log.info("Authentication plugin class obtained from system property '" +
+ log.debug("Authentication plugin class obtained from system property '" +
AUTHENTICATION_PLUGIN_PROP + "': " + pluginClassName);
} else {
log.debug("No authentication plugin used.");
@@ -463,14 +462,11 @@ public class CoreContainer {
MDCLoggingContext.setNode(this);
- ZkStateReader.ConfigData securityConfig = isZooKeeperAware() ? getZkController().getZkStateReader().getSecurityProps(false) : new ZkStateReader.ConfigData(EMPTY_MAP, -1);
- initializeAuthorizationPlugin((Map<String, Object>) securityConfig.data.get("authorization"));
- initializeAuthenticationPlugin((Map<String, Object>) securityConfig.data.get("authentication"));
-
+ securityConfHandler = isZooKeeperAware() ? new SecurityConfHandlerZk(this) : new SecurityConfHandlerLocal(this);
+ reloadSecurityProperties();
this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins());
containerHandlers.put(ZK_PATH, new ZookeeperInfoHandler(this));
- securityConfHandler = new SecurityConfHandler(this);
collectionsHandler = createHandler(cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler);
infoHandler = createHandler(cfg.getInfoHandlerClass(), InfoHandler.class);
@@ -567,10 +563,17 @@ public class CoreContainer {
}
public void securityNodeChanged() {
- log.info("Security node changed");
- ZkStateReader.ConfigData securityConfig = getZkController().getZkStateReader().getSecurityProps(false);
- initializeAuthorizationPlugin((Map<String, Object>) securityConfig.data.get("authorization"));
- initializeAuthenticationPlugin((Map<String, Object>) securityConfig.data.get("authentication"));
+ log.info("Security node changed, reloading security.json");
+ reloadSecurityProperties();
+ }
+
+ /**
+ * Make sure securityConfHandler is initialized
+ */
+ private void reloadSecurityProperties() {
+ SecurityConfHandler.SecurityConfig securityConfig = securityConfHandler.getSecurityConfig(false);
+ initializeAuthorizationPlugin((Map<String, Object>) securityConfig.getData().get("authorization"));
+ initializeAuthenticationPlugin((Map<String, Object>) securityConfig.getData().get("authentication"));
}
private static void checkForDuplicateCoreNames(List<CoreDescriptor> cds) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
index 0f4dd7b..1fea431 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
@@ -17,6 +17,8 @@
package org.apache.solr.handler.admin;
import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
@@ -25,7 +27,6 @@ import java.util.Map;
import java.util.Objects;
import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ZkStateReader.ConfigData;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.CoreContainer;
@@ -37,10 +38,14 @@ import org.apache.solr.security.AuthorizationContext;
import org.apache.solr.security.ConfigEditablePlugin;
import org.apache.solr.security.PermissionNameProvider;
import org.apache.solr.util.CommandOperation;
-import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public class SecurityConfHandler extends RequestHandlerBase implements PermissionNameProvider {
- private CoreContainer cores;
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+
+public abstract class SecurityConfHandler extends RequestHandlerBase implements PermissionNameProvider {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ protected CoreContainer cores;
public SecurityConfHandler(CoreContainer coreContainer) {
this.cores = coreContainer;
@@ -92,11 +97,12 @@ public class SecurityConfHandler extends RequestHandlerBase implements Permissio
if (ops == null) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No commands");
}
- for (; ; ) {
- ConfigData data = getSecurityProps(true);
- Map<String, Object> latestConf = (Map<String, Object>) data.data.get(key);
+ for (int count = 1; count <= 3 ; count++ ) {
+ SecurityConfig securityConfig = getSecurityConfig(true);
+ Map<String, Object> data = securityConfig.getData();
+ Map<String, Object> latestConf = (Map<String, Object>) data.get(key);
if (latestConf == null) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No configuration present for " + key);
+ throw new SolrException(SERVER_ERROR, "No configuration present for " + key);
}
List<CommandOperation> commandsCopy = CommandOperation.clone(ops);
Map<String, Object> out = configEditablePlugin.edit(Utils.getDeepCopy(latestConf, 4) , commandsCopy);
@@ -106,20 +112,31 @@ public class SecurityConfHandler extends RequestHandlerBase implements Permissio
rsp.add(CommandOperation.ERR_MSGS, errs);
return;
}
- //no edits
+ log.debug("No edits made");
return;
} else {
if(!Objects.equals(latestConf.get("class") , out.get("class"))){
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "class cannot be modified");
+ throw new SolrException(SERVER_ERROR, "class cannot be modified");
}
Map meta = getMapValue(out, "");
- meta.put("v", data.version+1);//encode the expected zkversion
- data.data.put(key, out);
- if(persistConf("/security.json", Utils.toJSON(data.data), data.version)) return;
+ meta.put("v", securityConfig.getVersion()+1);//encode the expected zkversion
+ data.put(key, out);
+
+ if(persistConf(securityConfig)) {
+ securityConfEdited();
+ return;
+ }
}
+ log.debug("Security edit operation failed {} time(s)" + count);
}
+ throw new SolrException(SERVER_ERROR, "Failed to persist security config after 3 attempts. Giving up");
}
+ /**
+ * Hook where you can do stuff after a config has been edited. Defaults to NOP
+ */
+ protected void securityConfEdited() {}
+
Object getPlugin(String key) {
Object plugin = null;
if ("authentication".equals(key)) plugin = cores.getAuthenticationPlugin();
@@ -127,38 +144,14 @@ public class SecurityConfHandler extends RequestHandlerBase implements Permissio
return plugin;
}
- ConfigData getSecurityProps(boolean getFresh) {
- return cores.getZkController().getZkStateReader().getSecurityProps(getFresh);
- }
-
- boolean persistConf(String path, byte[] buf, int version) {
- try {
- cores.getZkController().getZkClient().setData(path,buf,version, true);
- return true;
- } catch (KeeperException.BadVersionException bdve){
- return false;
- } catch (Exception e) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, " Unable to persist conf",e);
- }
- }
-
-
- private void getConf(SolrQueryResponse rsp, String key) {
- ConfigData map = cores.getZkController().getZkStateReader().getSecurityProps(false);
- Object o = map == null ? null : map.data.get(key);
- if (o == null) {
- rsp.add(CommandOperation.ERR_MSGS, Collections.singletonList("No " + key + " configured"));
- } else {
- rsp.add(key+".enabled", getPlugin(key)!=null);
- rsp.add(key, o);
- }
- }
+ protected abstract void getConf(SolrQueryResponse rsp, String key);
public static Map<String, Object> getMapValue(Map<String, Object> lookupMap, String key) {
Map<String, Object> m = (Map<String, Object>) lookupMap.get(key);
if (m == null) lookupMap.put(key, m = new LinkedHashMap<>());
return m;
}
+
public static List getListValue(Map<String, Object> lookupMap, String key) {
List l = (List) lookupMap.get(key);
if (l == null) lookupMap.put(key, l= new ArrayList());
@@ -170,6 +163,77 @@ public class SecurityConfHandler extends RequestHandlerBase implements Permissio
return "Edit or read security configuration";
}
+ /**
+ * Gets security.json from source
+ */
+ public abstract SecurityConfig getSecurityConfig(boolean getFresh);
+
+ /**
+ * Persist security.json to the source, optionally with a version
+ */
+ protected abstract boolean persistConf(SecurityConfig securityConfig) throws IOException;
+
+ /**
+ * Object to hold security.json as nested <code>Map<String,Object></code> and optionally its version.
+ * The version property is optional and defaults to -1 if not initialized.
+ * The data object defaults to EMPTY_MAP if not set
+ */
+ public static class SecurityConfig {
+ private Map<String, Object> data = Collections.EMPTY_MAP;
+ private int version = -1;
+
+ public SecurityConfig() {}
+
+ /**
+ * Sets the data as a Map
+ * @param data a Map
+ * @return SecurityConf object (builder pattern)
+ */
+ public SecurityConfig setData(Map<String, Object> data) {
+ this.data = data;
+ return this;
+ }
+ /**
+ * Sets the data as an Object, but the object needs to be of type Map
+ * @param data an Object of type Map<String,Object>
+ * @return SecurityConf object (builder pattern)
+ */
+ public SecurityConfig setData(Object data) {
+ if (data instanceof Map) {
+ this.data = (Map<String, Object>) data;
+ return this;
+ } else {
+ throw new SolrException(SERVER_ERROR, "Illegal format when parsing security.json, not object");
+ }
+ }
+
+ /**
+ * Sets version
+ * @param version integer for version. Depends on underlying storage
+ * @return SecurityConf object (builder pattern)
+ */
+ public SecurityConfig setVersion(int version) {
+ this.version = version;
+ return this;
+ }
+
+ public Map<String, Object> getData() {
+ return data;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ /**
+ * Set data from input stream
+ * @param securityJsonInputStream an input stream for security.json
+ * @return this (builder pattern)
+ */
+ public SecurityConfig setData(InputStream securityJsonInputStream) {
+ return setData(Utils.fromJSON(securityJsonInputStream));
+ }
}
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
new file mode 100644
index 0000000..d6745c5
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.util.CommandOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Security Configuration Handler which works on standalone local files
+ */
+public class SecurityConfHandlerLocal extends SecurityConfHandler {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ protected Path securityJsonPath;
+
+ public SecurityConfHandlerLocal(CoreContainer coreContainer) {
+ super(coreContainer);
+ securityJsonPath = SolrResourceLoader.locateSolrHome().resolve("security.json");
+ }
+
+ /**
+ * Fetches security props from SOLR_HOME
+ * @param getFresh NOP
+ * @return SecurityConfig whose data property either contains security.json, or an empty map if not found
+ */
+ @Override
+ public SecurityConfig getSecurityConfig(boolean getFresh) {
+ if (Files.exists(securityJsonPath)) {
+ try (InputStream securityJsonIs = Files.newInputStream(securityJsonPath)) {
+ return new SecurityConfig().setData(securityJsonIs);
+ } catch (IOException e) { /* Fall through */ }
+ }
+ return new SecurityConfig();
+ }
+
+ @Override
+ protected void getConf(SolrQueryResponse rsp, String key) {
+ SecurityConfig props = getSecurityConfig(false);
+ Object o = props.getData().get(key);
+ if (o == null) {
+ rsp.add(CommandOperation.ERR_MSGS, Collections.singletonList("No " + key + " configured"));
+ } else {
+ rsp.add(key+".enabled", getPlugin(key)!=null);
+ rsp.add(key, o);
+ }
+ }
+
+ @Override
+ protected boolean persistConf(SecurityConfig securityConfig) throws IOException {
+ if (securityConfig == null || securityConfig.getData().isEmpty()) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Failed persisting security.json to SOLR_HOME. Object was empty.");
+ }
+ try(OutputStream securityJsonOs = Files.newOutputStream(securityJsonPath)) {
+ securityJsonOs.write(Utils.toJSON(securityConfig.getData()));
+ log.debug("Persisted security.json to {}", securityJsonPath);
+ return true;
+ } catch (Exception e) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Failed persisting security.json to " + securityJsonPath, e);
+ }
+ }
+
+ @Override
+ public String getDescription() {
+ return "Edit or read security configuration locally in SOLR_HOME";
+ }
+
+ @Override
+ protected void securityConfEdited() {
+ // Need to call explicitly since we will not get notified of changes to local security.json
+ cores.securityNodeChanged();
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerZk.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerZk.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerZk.java
new file mode 100644
index 0000000..8323b8a
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerZk.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.util.CommandOperation;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+import static org.apache.solr.common.cloud.ZkStateReader.SOLR_SECURITY_CONF_PATH;
+
+/**
+ * Security Configuration Handler which works with Zookeeper
+ */
+public class SecurityConfHandlerZk extends SecurityConfHandler {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ public SecurityConfHandlerZk(CoreContainer coreContainer) {
+ super(coreContainer);
+ }
+
+ /**
+ * Fetches security props from Zookeeper and adds version
+ * @param getFresh refresh from ZK
+ * @return SecurityConfig whose data property either contains security.json, or an empty map if not found
+ */
+ @Override
+ public SecurityConfig getSecurityConfig(boolean getFresh) {
+ ZkStateReader.ConfigData configDataFromZk = cores.getZkController().getZkStateReader().getSecurityProps(getFresh);
+ return configDataFromZk == null ?
+ new SecurityConfig() :
+ new SecurityConfig().setData(configDataFromZk.data).setVersion(configDataFromZk.version);
+ }
+
+ @Override
+ protected void getConf(SolrQueryResponse rsp, String key) {
+ ZkStateReader.ConfigData map = cores.getZkController().getZkStateReader().getSecurityProps(false);
+ Object o = map == null ? null : map.data.get(key);
+ if (o == null) {
+ rsp.add(CommandOperation.ERR_MSGS, Collections.singletonList("No " + key + " configured"));
+ } else {
+ rsp.add(key+".enabled", getPlugin(key)!=null);
+ rsp.add(key, o);
+ }
+ }
+
+ @Override
+ protected boolean persistConf(SecurityConfig securityConfig) throws IOException {
+ try {
+ cores.getZkController().getZkClient().setData(SOLR_SECURITY_CONF_PATH,
+ Utils.toJSON(securityConfig.getData()),
+ securityConfig.getVersion(), true);
+ log.debug("Persisted security.json to {}", SOLR_SECURITY_CONF_PATH);
+ return true;
+ } catch (KeeperException.BadVersionException bdve){
+ log.warn("Failed persisting security.json to {}", SOLR_SECURITY_CONF_PATH, bdve);
+ return false;
+ } catch (Exception e) {
+ throw new SolrException(SERVER_ERROR, "Unable to persist security.json", e);
+ }
+ }
+
+ @Override
+ public String getDescription() {
+ return "Edit or read security configuration from Zookeeper";
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
index 49c02d7..29a887b 100644
--- a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
@@ -43,12 +43,12 @@ import org.slf4j.LoggerFactory;
public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEditablePlugin {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
- private AuthenticationProvider zkAuthentication;
+ private AuthenticationProvider authenticationProvider;
private final static ThreadLocal<Header> authHeader = new ThreadLocal<>();
private boolean blockUnknown = false;
public boolean authenticate(String username, String pwd) {
- return zkAuthentication.authenticate(username, pwd);
+ return authenticationProvider.authenticate(username, pwd);
}
@Override
@@ -61,7 +61,7 @@ public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEdita
log.error(e.getMessage());
}
}
- zkAuthentication = getAuthenticationProvider(pluginConfig);
+ authenticationProvider = getAuthenticationProvider(pluginConfig);
}
@Override
@@ -79,8 +79,8 @@ public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEdita
}
}
if (!CommandOperation.captureErrors(commands).isEmpty()) return null;
- if (zkAuthentication instanceof ConfigEditablePlugin) {
- ConfigEditablePlugin editablePlugin = (ConfigEditablePlugin) zkAuthentication;
+ if (authenticationProvider instanceof ConfigEditablePlugin) {
+ ConfigEditablePlugin editablePlugin = (ConfigEditablePlugin) authenticationProvider;
return editablePlugin.edit(latestConf, commands);
}
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "This cannot be edited");
@@ -93,7 +93,7 @@ public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEdita
}
private void authenticationFailure(HttpServletResponse response, String message) throws IOException {
- for (Map.Entry<String, String> entry : zkAuthentication.getPromptHeaders().entrySet()) {
+ for (Map.Entry<String, String> entry : authenticationProvider.getPromptHeaders().entrySet()) {
response.setHeader(entry.getKey(), entry.getValue());
}
response.sendError(401, message);
@@ -143,7 +143,7 @@ public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEdita
if (blockUnknown) {
authenticationFailure(response, "require authentication");
} else {
- request.setAttribute(AuthenticationPlugin.class.getName(), zkAuthentication.getPromptHeaders());
+ request.setAttribute(AuthenticationPlugin.class.getName(), authenticationProvider.getPromptHeaders());
filterChain.doFilter(request, response);
return true;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
new file mode 100644
index 0000000..92a18b1
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrResourceLoader;
+
+/**
+ * Wrapper for use in tests
+ */
+public class SecurityConfHandlerLocalForTesting extends SecurityConfHandlerLocal {
+
+ public SecurityConfHandlerLocalForTesting(CoreContainer coreContainer) {
+ super(coreContainer);
+ }
+
+ public boolean persistConf(SecurityConfig securityConfig) throws IOException {
+ // Set JSON_PATH again since the test may have
+ securityJsonPath = SolrResourceLoader.locateSolrHome().resolve("security.json");
+ return super.persistConf(securityConfig);
+ }
+
+ public void securityConfEdited() {
+ super.securityConfEdited();
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
index 5e3d407..54c8587 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
@@ -23,7 +23,6 @@ import java.util.List;
import java.util.Map;
import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.cloud.ZkStateReader.ConfigData;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.Utils;
@@ -34,6 +33,7 @@ import org.apache.solr.security.RuleBasedAuthorizationPlugin;
import org.apache.solr.util.CommandOperation;
import static org.apache.solr.common.util.Utils.makeMap;
+import static org.apache.solr.handler.admin.SecurityConfHandler.SecurityConfig;
public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
@@ -51,8 +51,8 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
handler.handleRequestBody(req,new SolrQueryResponse());
BasicAuthPlugin basicAuth = new BasicAuthPlugin();
- ConfigData securityCfg = (ConfigData) handler.m.get("/security.json");
- basicAuth.init((Map<String, Object>) securityCfg.data.get("authentication"));
+ SecurityConfig securityCfg = handler.m.get("/security.json");
+ basicAuth.init((Map<String, Object>) securityCfg.getData().get("authentication"));
assertTrue(basicAuth.authenticate("tom", "TomIsUberCool"));
command = "{\n" +
@@ -62,9 +62,9 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
req.setContentStreams(Collections.singletonList(o));
handler.handleRequestBody(req,new SolrQueryResponse());
- securityCfg = (ConfigData) handler.m.get("/security.json");
- assertEquals(3, securityCfg.version);
- Map result = (Map) securityCfg.data.get("authentication");
+ securityCfg = handler.m.get("/security.json");
+ assertEquals(3, securityCfg.getVersion());
+ Map result = (Map) securityCfg.getData().get("authentication");
result = (Map) result.get("credentials");
assertTrue(result.isEmpty());
@@ -86,7 +86,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
SolrQueryResponse rsp = new SolrQueryResponse();
handler.handleRequestBody(req, rsp);
assertNull(rsp.getValues().get(CommandOperation.ERR_MSGS));
- Map authzconf = (Map) ((ConfigData) handler.m.get("/security.json")).data.get("authorization");
+ Map authzconf = (Map) handler.m.get("/security.json").getData().get("authorization");
Map userRoles = (Map) authzconf.get("user-role");
List tomRoles = (List) userRoles.get("tom");
assertTrue(tomRoles.contains("admin"));
@@ -108,7 +108,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
req.setContentStreams(Collections.singletonList(o));
rsp = new SolrQueryResponse();
handler.handleRequestBody(req, rsp);
- authzconf = (Map) ((ConfigData) handler.m.get("/security.json")).data.get("authorization");
+ authzconf = (Map) handler.m.get("/security.json").getData().get("authorization");
permissions = (List<Map>) authzconf.get("permissions");
Map p = permissions.get(1);
@@ -128,7 +128,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
req.setContentStreams(Collections.singletonList(o));
rsp = new SolrQueryResponse();
handler.handleRequestBody(req, rsp);
- authzconf = (Map) ((ConfigData) handler.m.get("/security.json")).data.get("authorization");
+ authzconf = (Map) handler.m.get("/security.json").getData().get("authorization");
permissions = (List<Map>) authzconf.get("permissions");
p = permissions.get(0);
@@ -151,7 +151,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
rsp = new SolrQueryResponse();
handler.handleRequestBody(req, rsp);
assertNull(rsp.getValues().get(CommandOperation.ERR_MSGS));
- authzconf = (Map) ((ConfigData) handler.m.get("/security.json")).data.get("authorization");
+ authzconf = (Map) handler.m.get("/security.json").getData().get("authorization");
userRoles = (Map) authzconf.get("user-role");
assertEquals(0, userRoles.size());
permissions = (List<Map>) authzconf.get("permissions");
@@ -178,25 +178,26 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
public static class MockSecurityHandler extends SecurityConfHandler {
- private Map<String, Object> m;
+ private Map<String, SecurityConfig> m;
final BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin();
final RuleBasedAuthorizationPlugin rulesBasedAuthorizationPlugin = new RuleBasedAuthorizationPlugin();
- public MockSecurityHandler() {
+ public MockSecurityHandler() {
super(null);
m = new HashMap<>();
- ConfigData data = new ConfigData(makeMap("authentication", makeMap("class", "solr."+ BasicAuthPlugin.class.getSimpleName())), 1);
- data.data.put("authorization", makeMap("class", "solr."+RuleBasedAuthorizationPlugin.class.getSimpleName()));
- m.put("/security.json", data);
-
+ SecurityConfig sp = new SecurityConfig();
+ sp.setData(makeMap("authentication", makeMap("class", "solr."+ BasicAuthPlugin.class.getSimpleName())));
+ sp.setVersion(1);
+ sp.getData().put("authorization", makeMap("class", "solr."+RuleBasedAuthorizationPlugin.class.getSimpleName()));
+ m.put("/security.json", sp);
basicAuthPlugin.init(new HashMap<>());
rulesBasedAuthorizationPlugin.init(new HashMap<>());
}
- public Map<String, Object> getM() {
+ public Map<String, SecurityConfig> getM() {
return m;
}
@@ -212,24 +213,25 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
}
@Override
- ConfigData getSecurityProps(boolean getFresh) {
- return (ConfigData) m.get("/security.json");
+ protected void getConf(SolrQueryResponse rsp, String key) {
+ // NOP
+ }
+
+ @Override
+ public SecurityConfig getSecurityConfig(boolean getFresh) {
+ return m.get("/security.json");
}
@Override
- boolean persistConf(String key, byte[] buf, int version) {
- Object data = m.get(key);
- if (data instanceof ConfigData) {
- ConfigData configData = (ConfigData) data;
- if (configData.version == version) {
- ConfigData result = new ConfigData((Map<String, Object>) Utils.fromJSON(buf), version + 1);
- m.put(key, result);
- return true;
- } else {
- return false;
- }
+ protected boolean persistConf(SecurityConfig props) {
+ SecurityConfig fromMap = m.get("/security.json");
+ if (fromMap.getVersion() == props.getVersion()) {
+ props.setVersion(props.getVersion()+1);
+ m.put("/security.json", props);
+ return true;
+ } else {
+ return false;
}
- throw new RuntimeException();
}
@@ -254,7 +256,7 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
req.setContentStreams(Collections.singletonList(o));
SolrQueryResponse rsp = new SolrQueryResponse();
handleRequestBody(req, rsp);
- Map<String, Object> data = ((ConfigData) m.get("/security.json")).data;
+ Map<String, Object> data = m.get("/security.json").getData();
((Map)data.get("authentication")).remove("");
((Map)data.get("authorization")).remove("");
return Utils.toJSONString (data);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 6967b27..4134bf2 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -292,11 +292,11 @@ public class BasicAuthIntegrationTest extends SolrCloudTestCase {
return l.isEmpty() ? null : l.get(0);
}
- static final Predicate NOT_NULL_PREDICATE = o -> o != null;
+ protected static final Predicate NOT_NULL_PREDICATE = o -> o != null;
//the password is 'SolrRocks'
+ //this could be generated every time. But then we will not know if there is any regression
- private static final String STD_CONF = "{\n" +
+ protected static final String STD_CONF = "{\n" +
" 'authentication':{\n" +
" 'class':'solr.BasicAuthPlugin',\n" +
" 'credentials':{'solr':'orwp2Ghgj39lmnrZOTm7Qtre1VqHFDfwAEzr0ApbN3Y= Ju5osoAqOX8iafhWpPP01E5P+sg8tK8tHON7rCYZRRw='}},\n" +
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d25a6181/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
new file mode 100644
index 0000000..829ce9c
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.security;
+
+import java.lang.invoke.MethodHandles;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.Properties;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.message.AbstractHttpMessage;
+import org.apache.http.message.BasicHeader;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpClientUtil;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.Base64;
+import org.apache.solr.common.util.ContentStreamBase;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.handler.admin.SecurityConfHandler;
+import org.apache.solr.handler.admin.SecurityConfHandlerLocalForTesting;
+import org.apache.solr.util.AbstractSolrTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.solr.security.BasicAuthIntegrationTest.NOT_NULL_PREDICATE;
+import static org.apache.solr.security.BasicAuthIntegrationTest.STD_CONF;
+import static org.apache.solr.security.BasicAuthIntegrationTest.verifySecurityStatus;
+
+public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
+
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private Path ROOT_DIR = Paths.get(getSolrHome());
+ private Path CONF_DIR = ROOT_DIR.resolve("configsets").resolve("configset-2").resolve("conf");
+
+ SecurityConfHandlerLocalForTesting securityConfHandler;
+ SolrInstance instance = null;
+ JettySolrRunner jetty;
+
+ @Before
+ @Override
+ public void setUp() throws Exception
+ {
+ super.setUp();
+ instance = new SolrInstance("inst", null);
+ instance.setUp();
+ System.setProperty("solr.solr.home", instance.getHomeDir().toString());
+ jetty = createJetty(instance);
+ initCore("solrconfig.xml", "schema.xml", instance.getHomeDir().toString());
+ securityConfHandler = new SecurityConfHandlerLocalForTesting(jetty.getCoreContainer());
+ }
+
+ @Override
+ @After
+ public void tearDown() throws Exception {
+ jetty.stop();
+ super.tearDown();
+ }
+
+ @Test
+ public void testBasicAuth() throws Exception {
+
+ String authcPrefix = "/admin/authentication";
+
+ HttpClient cl = null;
+ HttpSolrClient httpSolrClient = null;
+ try {
+ cl = HttpClientUtil.createClient(null);
+ String baseUrl = buildUrl(jetty.getLocalPort(), "/solr");
+ httpSolrClient = getHttpSolrClient(baseUrl);
+
+ verifySecurityStatus(cl, baseUrl + authcPrefix, "/errorMessages", null, 20);
+
+ // Write security.json locally. Should cause security to be initialized
+ securityConfHandler.persistConf(new SecurityConfHandler.SecurityConfig()
+ .setData(Utils.fromJSONString(STD_CONF.replaceAll("'", "\""))));
+ securityConfHandler.securityConfEdited();
+ verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
+
+ String command = "{\n" +
+ "'set-user': {'harry':'HarryIsCool'}\n" +
+ "}";
+
+ GenericSolrRequest genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams());
+ genericReq.setContentStreams(Collections.singletonList(new ContentStreamBase.ByteArrayStream(command.getBytes(UTF_8), "")));
+
+ HttpSolrClient finalHttpSolrClient = httpSolrClient;
+ HttpSolrClient.RemoteSolrException exp = expectThrows(HttpSolrClient.RemoteSolrException.class, () -> {
+ finalHttpSolrClient.request(genericReq);
+ });
+ assertEquals(401, exp.code());
+
+ command = "{\n" +
+ "'set-user': {'harry':'HarryIsUberCool'}\n" +
+ "}";
+
+ HttpPost httpPost = new HttpPost(baseUrl + authcPrefix);
+ setBasicAuthHeader(httpPost, "solr", "SolrRocks");
+ httpPost.setEntity(new ByteArrayEntity(command.getBytes(UTF_8)));
+ httpPost.addHeader("Content-Type", "application/json; charset=UTF-8");
+ verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication.enabled", "true", 20);
+ HttpResponse r = cl.execute(httpPost);
+ int statusCode = r.getStatusLine().getStatusCode();
+ Utils.consumeFully(r.getEntity());
+ assertEquals("proper_cred sent, but access denied", 200, statusCode);
+
+ verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20);
+
+ // Read file from SOLR_HOME and verify that it contains our new user
+ assertTrue(new String(Utils.toJSON(securityConfHandler.getSecurityConfig(false).getData()),
+ Charset.forName("UTF-8")).contains("harry"));
+ } finally {
+ if (cl != null) {
+ HttpClientUtil.close(cl);
+ httpSolrClient.close();
+ }
+ }
+ }
+
+ public static void setBasicAuthHeader(AbstractHttpMessage httpMsg, String user, String pwd) {
+ String userPass = user + ":" + pwd;
+ String encoded = Base64.byteArrayToBase64(userPass.getBytes(UTF_8));
+ httpMsg.setHeader(new BasicHeader("Authorization", "Basic " + encoded));
+ log.info("Added Basic Auth security Header {}",encoded );
+ }
+
+ private JettySolrRunner createJetty(SolrInstance instance) throws Exception {
+ Properties nodeProperties = new Properties();
+ nodeProperties.setProperty("solr.data.dir", instance.getDataDir().toString());
+ JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir().toString(), nodeProperties, buildJettyConfig("/solr"));
+ jetty.start();
+ return jetty;
+ }
+
+
+ private class SolrInstance {
+ String name;
+ Integer port;
+ Path homeDir;
+ Path confDir;
+ Path dataDir;
+
+ /**
+ * if masterPort is null, this instance is a master -- otherwise this instance is a slave, and assumes the master is
+ * on localhost at the specified port.
+ */
+ public SolrInstance(String name, Integer port) {
+ this.name = name;
+ this.port = port;
+ }
+
+ public Path getHomeDir() {
+ return homeDir;
+ }
+
+ public Path getSchemaFile() {
+ return CONF_DIR.resolve("schema.xml");
+ }
+
+ public Path getConfDir() {
+ return confDir;
+ }
+
+ public Path getDataDir() {
+ return dataDir;
+ }
+
+ public Path getSolrConfigFile() {
+ return CONF_DIR.resolve("solrconfig.xml");
+ }
+
+ public Path getSolrXmlFile() {
+ return ROOT_DIR.resolve("solr.xml");
+ }
+
+
+ public void setUp() throws Exception {
+ homeDir = createTempDir(name).toAbsolutePath();
+ dataDir = homeDir.resolve("collection1").resolve("data");
+ confDir = homeDir.resolve("collection1").resolve("conf");
+
+ Files.createDirectories(homeDir);
+ Files.createDirectories(dataDir);
+ Files.createDirectories(confDir);
+
+ Files.copy(getSolrXmlFile(), homeDir.resolve("solr.xml"));
+ Files.copy(getSolrConfigFile(), confDir.resolve("solrconfig.xml"));
+ Files.copy(getSchemaFile(), confDir.resolve("schema.xml"));
+
+ Files.createFile(homeDir.resolve("collection1").resolve("core.properties"));
+ }
+
+ }
+}
[50/50] [abbrv] lucene-solr:jira/solr-8593: Merge branch
'apache-https-master' into jira/solr-8593
Posted by kr...@apache.org.
Merge branch 'apache-https-master' into jira/solr-8593
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1cd8da98
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1cd8da98
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1cd8da98
Branch: refs/heads/jira/solr-8593
Commit: 1cd8da9863c31efe52c54e908f477aea7a9d9fa4
Parents: 285bbf0 af60048
Author: Kevin Risden <kr...@apache.org>
Authored: Thu Oct 27 15:08:46 2016 -0500
Committer: Kevin Risden <kr...@apache.org>
Committed: Thu Oct 27 15:08:46 2016 -0500
----------------------------------------------------------------------
dev-tools/idea/solr/contrib/langid/langid.iml | 1 +
lucene/CHANGES.txt | 12 +
.../lucene/analysis/custom/CustomAnalyzer.java | 2 +-
.../lucene/collation/CollationKeyAnalyzer.java | 2 +-
.../codecs/lucene53/Lucene53NormsProducer.java | 6 +
.../lucene54/Lucene54DocValuesProducer.java | 65 ++-
.../lucene54/TestLucene54DocValuesFormat.java | 5 +-
.../simpletext/SimpleTextDocValuesReader.java | 95 +++-
.../simpletext/SimpleTextDocValuesWriter.java | 9 +
.../org/apache/lucene/analysis/Analyzer.java | 9 +-
.../apache/lucene/analysis/AnalyzerWrapper.java | 50 ++-
.../analysis/DelegatingAnalyzerWrapper.java | 14 +-
.../apache/lucene/codecs/DocValuesConsumer.java | 25 ++
.../org/apache/lucene/codecs/NormsConsumer.java | 5 +
.../lucene/codecs/lucene70/IndexedDISI.java | 88 +++-
.../lucene70/Lucene70DocValuesProducer.java | 441 +++++++++++++------
.../codecs/lucene70/Lucene70NormsProducer.java | 220 +++++----
.../apache/lucene/index/BinaryDocValues.java | 5 +-
.../lucene/index/BinaryDocValuesWriter.java | 5 +
.../org/apache/lucene/index/CheckIndex.java | 81 +++-
.../java/org/apache/lucene/index/DocValues.java | 95 ++--
.../apache/lucene/index/DocValuesIterator.java | 33 ++
.../lucene/index/FilterBinaryDocValues.java | 5 +
.../lucene/index/FilterNumericDocValues.java | 5 +
.../org/apache/lucene/index/IndexWriter.java | 16 +-
.../index/LegacyBinaryDocValuesWrapper.java | 8 +
.../index/LegacyNumericDocValuesWrapper.java | 9 +
.../index/LegacySortedDocValuesWrapper.java | 9 +
.../LegacySortedNumericDocValuesWrapper.java | 10 +
.../index/LegacySortedSetDocValuesWrapper.java | 10 +
.../org/apache/lucene/index/MultiDocValues.java | 125 ++++++
.../apache/lucene/index/NormValuesWriter.java | 5 +
.../apache/lucene/index/NumericDocValues.java | 7 +-
.../lucene/index/NumericDocValuesWriter.java | 5 +
.../apache/lucene/index/ReadersAndUpdates.java | 10 +
.../index/SingletonSortedNumericDocValues.java | 24 +-
.../index/SingletonSortedSetDocValues.java | 18 +-
.../apache/lucene/index/SortedDocValues.java | 3 +
.../lucene/index/SortedDocValuesWriter.java | 5 +
.../lucene/index/SortedNumericDocValues.java | 6 +-
.../index/SortedNumericDocValuesWriter.java | 5 +
.../apache/lucene/index/SortedSetDocValues.java | 5 +-
.../lucene/index/SortedSetDocValuesWriter.java | 5 +
.../apache/lucene/index/SortingLeafReader.java | 32 ++
.../apache/lucene/search/FieldComparator.java | 40 +-
.../lucene/search/SortedNumericSelector.java | 18 +
.../apache/lucene/search/SortedSetSelector.java | 36 ++
.../search/similarities/BM25Similarity.java | 8 +-
.../search/similarities/SimilarityBase.java | 6 +-
.../search/similarities/TFIDFSimilarity.java | 8 +-
.../analysis/TestDelegatingAnalyzerWrapper.java | 107 +++++
.../AbstractTestCompressionMode.java | 8 +-
.../AbstractTestLZ4CompressionMode.java | 10 +-
.../lucene/codecs/lucene50/TestForUtil.java | 8 +-
.../lucene/codecs/lucene70/TestIndexedDISI.java | 28 +-
.../lucene70/TestLucene70DocValuesFormat.java | 4 +-
.../lucene/index/Test4GBStoredFields.java | 4 +-
.../org/apache/lucene/search/TestBooleanOr.java | 4 +-
.../lucene/search/TestBooleanRewrites.java | 2 +-
.../lucene/util/TestTimSorterWorstCase.java | 6 +-
.../lucene/util/automaton/TestOperations.java | 4 +-
.../lucene/util/packed/TestPackedInts.java | 14 +-
.../DefaultSortedSetDocValuesReaderState.java | 3 +-
.../SortedSetDocValuesFacetCounts.java | 131 ++++--
.../facet/taxonomy/FastTaxonomyFacetCounts.java | 49 +++
.../lucene/facet/taxonomy/TaxonomyFacets.java | 4 +-
.../sortedset/TestSortedSetDocValuesFacets.java | 25 +-
.../facet/taxonomy/TestTaxonomyFacetCounts.java | 84 ++--
.../highlight/WeightedSpanTermExtractor.java | 13 +-
.../search/highlight/HighlighterTest.java | 17 +
lucene/ivy-versions.properties | 2 +-
.../lucene/search/join/BlockJoinSelector.java | 104 ++++-
.../search/join/GenericTermsCollector.java | 7 +
.../search/join/TestBlockJoinSelector.java | 12 +
.../apache/lucene/search/join/TestJoinUtil.java | 18 +-
.../randomizedtesting-runner-2.3.4.jar.sha1 | 1 -
.../randomizedtesting-runner-2.4.0.jar.sha1 | 1 +
.../apache/lucene/index/memory/MemoryIndex.java | 6 +
.../search/TestDiversifiedTopDocsCollector.java | 9 +
.../function/TestDocValuesFieldSources.java | 4 +-
.../apache/lucene/spatial3d/TestGeo3DPoint.java | 10 +-
.../lucene/analysis/MockBytesAnalyzer.java | 2 +-
.../codecs/compressing/CompressingCodec.java | 14 +-
.../lucene/index/AssertingLeafReader.java | 89 +++-
.../index/BaseDocValuesFormatTestCase.java | 331 ++++++++------
.../index/BaseIndexFileFormatTestCase.java | 12 +
.../lucene/index/BaseNormsFormatTestCase.java | 101 +++--
.../index/BaseStoredFieldsFormatTestCase.java | 22 +-
.../lucene/search/AssertingBulkScorer.java | 4 +-
.../lucene/search/RandomApproximationQuery.java | 4 +-
.../util/RunListenerPrintReproduceInfo.java | 4 +-
.../util/TestRuleSetupAndRestoreClassEnv.java | 13 +-
.../java/org/apache/lucene/util/TestUtil.java | 6 +-
.../TestCompressingStoredFieldsFormat.java | 4 +-
solr/CHANGES.txt | 71 ++-
solr/bin/install_solr_service.sh | 2 -
solr/bin/post | 23 +-
solr/bin/solr | 13 +-
solr/bin/solr.cmd | 78 ++--
solr/bin/solr.in.cmd | 4 +
solr/bin/solr.in.sh | 4 +-
.../solr/handler/dataimport/DIHCache.java | 16 +-
.../solr/handler/dataimport/DocBuilder.java | 2 +
.../solr/handler/dataimport/EventListener.java | 2 +-
.../handler/dataimport/VariableResolver.java | 4 +-
.../dataimport/XPathEntityProcessor.java | 52 ++-
.../handler/dataimport/XPathRecordReader.java | 15 +-
.../AbstractDataImportHandlerTestCase.java | 14 +-
.../dataimport/MockStringDataSource.java | 54 +++
.../solr/handler/dataimport/TestDocBuilder.java | 129 +++++-
.../handler/dataimport/TestDocBuilder2.java | 23 +
.../dataimport/TestXPathRecordReader.java | 15 +-
.../apache/solr/analysis/TokenizerChain.java | 2 +-
.../java/org/apache/solr/cloud/BackupCmd.java | 75 +++-
.../apache/solr/cloud/CreateSnapshotCmd.java | 179 ++++++++
.../apache/solr/cloud/DeleteSnapshotCmd.java | 160 +++++++
.../cloud/OverseerCollectionMessageHandler.java | 2 +
.../org/apache/solr/cloud/ZkController.java | 2 +-
.../org/apache/solr/core/CoreContainer.java | 32 +-
.../src/java/org/apache/solr/core/SolrCore.java | 44 +-
.../backup/repository/HdfsBackupRepository.java | 9 +
.../snapshots/CollectionSnapshotMetaData.java | 242 ++++++++++
.../core/snapshots/SolrSnapshotManager.java | 180 ++++++++
.../apache/solr/handler/DumpRequestHandler.java | 6 +-
.../apache/solr/handler/OldBackupDirectory.java | 2 +-
.../apache/solr/handler/ReplicationHandler.java | 2 +-
.../org/apache/solr/handler/StreamHandler.java | 32 +-
.../solr/handler/admin/CollectionsHandler.java | 54 ++-
.../solr/handler/admin/CoreAdminOperation.java | 7 +-
.../solr/handler/admin/CreateSnapshotOp.java | 10 +-
.../solr/handler/admin/DeleteSnapshotOp.java | 4 +
.../solr/handler/admin/SecurityConfHandler.java | 142 ++++--
.../handler/admin/SecurityConfHandlerLocal.java | 102 +++++
.../handler/admin/SecurityConfHandlerZk.java | 92 ++++
.../component/MoreLikeThisComponent.java | 2 +-
.../solr/handler/component/QueryComponent.java | 7 +-
.../solr/highlight/DefaultSolrHighlighter.java | 4 +-
.../apache/solr/index/NoMergePolicyFactory.java | 34 ++
.../apache/solr/request/DocValuesFacets.java | 20 +-
.../org/apache/solr/request/IntervalFacets.java | 12 +-
.../request/PerSegmentSingleValuedFaceting.java | 10 +-
.../solr/response/BinaryResponseWriter.java | 3 +-
.../solr/response/JSONResponseWriter.java | 3 +-
.../java/org/apache/solr/search/Grouping.java | 4 +-
.../apache/solr/search/HashQParserPlugin.java | 4 +-
.../apache/solr/search/SolrIndexSearcher.java | 67 ++-
.../TextLogisticRegressionQParserPlugin.java | 2 +-
.../apache/solr/search/facet/FacetField.java | 1 +
.../solr/search/facet/FacetFieldMerger.java | 8 +-
.../solr/search/facet/FacetFieldProcessor.java | 22 +-
.../facet/FacetFieldProcessorByArrayDV.java | 30 +-
.../apache/solr/search/facet/FacetRequest.java | 1 +
.../TopGroupsShardResponseProcessor.java | 9 +-
.../solr/search/mlt/SimpleMLTQParser.java | 33 +-
.../apache/solr/security/BasicAuthPlugin.java | 14 +-
.../security/DelegationTokenKerberosFilter.java | 11 +-
.../apache/solr/security/KerberosPlugin.java | 9 +-
.../security/Sha256AuthenticationProvider.java | 2 +-
.../apache/solr/servlet/SolrDispatchFilter.java | 22 +-
.../apache/solr/uninverting/FieldCacheImpl.java | 18 +
.../apache/solr/update/IndexFingerprint.java | 78 ++--
.../TemplateUpdateProcessorFactory.java | 23 +-
.../org/apache/solr/util/SimplePostTool.java | 59 ++-
.../src/java/org/apache/solr/util/SolrCLI.java | 239 +++++++---
.../conf/solrconfig-nomergepolicyfactory.xml | 32 ++
.../apache/solr/TestDistributedGrouping.java | 59 +--
.../cloud/LeaderElectionContextKeyTest.java | 114 +++++
.../cloud/LeaderFailureAfterFreshStartTest.java | 32 --
.../solr/cloud/PeerSyncReplicationTest.java | 2 +-
.../apache/solr/cloud/TestCloudRecovery.java | 154 +++++++
.../TestLeaderRecoverFromLogOnStartup.java | 77 ----
.../solr/core/TestCorePropertiesReload.java | 74 ++++
.../apache/solr/core/TestMergePolicyConfig.java | 20 +
.../core/snapshots/TestSolrCloudSnapshots.java | 285 ++++++++++++
.../apache/solr/handler/TestBlobHandler.java | 26 +-
.../solr/handler/TestHdfsBackupRestoreCore.java | 11 +
.../SecurityConfHandlerLocalForTesting.java | 39 ++
.../handler/admin/SecurityConfHandlerTest.java | 66 +--
.../solr/request/TestIntervalFaceting.java | 7 +-
.../solr/search/facet/TestJsonFacets.java | 59 +++
.../solr/security/BasicAuthIntegrationTest.java | 6 +-
.../solr/security/BasicAuthStandaloneTest.java | 219 +++++++++
.../org/apache/solr/update/PeerSyncTest.java | 4 +-
...PeerSyncWithIndexFingerprintCachingTest.java | 108 +++++
.../org/apache/solr/util/UtilsToolTest.java | 28 +-
solr/licenses/junit4-ant-2.3.4.jar.sha1 | 1 -
solr/licenses/junit4-ant-2.4.0.jar.sha1 | 1 +
.../randomizedtesting-runner-2.3.4.jar.sha1 | 1 -
.../randomizedtesting-runner-2.4.0.jar.sha1 | 1 +
.../org/apache/solr/client/solrj/SolrQuery.java | 248 +++++++++++
.../client/solrj/io/stream/ExecutorStream.java | 224 ++++++++++
.../solrj/request/CollectionAdminRequest.java | 116 ++++-
.../client/solrj/response/QueryResponse.java | 8 +
.../solr/common/params/CollectionParams.java | 3 +
.../solr/common/params/MoreLikeThisParams.java | 3 +
.../apache/solr/common/params/SolrParams.java | 124 +++++-
.../java/org/apache/solr/common/util/Cache.java | 26 +-
.../apache/solr/common/util/MapBackedCache.java | 57 +++
.../solr/client/solrj/SolrExampleTests.java | 21 +-
.../apache/solr/client/solrj/SolrQueryTest.java | 25 ++
.../solrj/io/stream/StreamExpressionTest.java | 152 ++++++-
.../solr/common/util/TestJavaBinCodec.java | 52 +--
.../java/org/apache/solr/SolrTestCaseHS.java | 1 +
.../solr/cloud/AbstractDistribZkTestBase.java | 31 ++
204 files changed, 6621 insertions(+), 1564 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1cd8da98/lucene/ivy-versions.properties
----------------------------------------------------------------------
[34/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9610: Add timeout
option,
abort early on auth failure SOLR-9680: Better error messages in SolrCLI when
authentication required
Posted by kr...@apache.org.
SOLR-9610: Add timeout option, abort early on auth failure
SOLR-9680: Better error messages in SolrCLI when authentication required
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/db43bfb0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/db43bfb0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/db43bfb0
Branch: refs/heads/jira/solr-8593
Commit: db43bfb0464347143e58e662113a5630224b070e
Parents: 643429d
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 14:19:22 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 14:19:22 2016 +0200
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +
.../src/java/org/apache/solr/util/SolrCLI.java | 223 +++++++++++++------
2 files changed, 162 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/db43bfb0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6c3d6e9..a952a5e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -334,6 +334,8 @@ Other Changes
* SOLR-9610: New AssertTool in SolrCLI for easier cross platform assertions from command line (janhoy)
+* SOLR-9680: Better error messages in SolrCLI when authentication required (janhoy)
+
* SOLR-9639: Test only fix. Prevent CDCR tests from removing collection during recovery that used to blow up jvm (Mikhail Khludnev)
* SOLR-9625: Add HelloWorldSolrCloudTestCase class (Christine Poerschke, Alan Woodward, Alexandre Rafalovitch)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/db43bfb0/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index ebaeda8..3750190 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -42,6 +42,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Optional;
import java.util.Scanner;
import java.util.Set;
import java.util.TreeSet;
@@ -75,6 +76,7 @@ import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.client.CloseableHttpClient;
@@ -112,6 +114,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.solr.common.SolrException.ErrorCode.FORBIDDEN;
+import static org.apache.solr.common.SolrException.ErrorCode.UNAUTHORIZED;
import static org.apache.solr.common.params.CommonParams.NAME;
/**
@@ -148,7 +152,7 @@ public class SolrCLI {
int toolExitStatus = 0;
try {
- setBasicAuth(cli);
+ setBasicAuth();
runImpl(cli);
} catch (Exception exc) {
// since this is a CLI, spare the user the stacktrace
@@ -163,21 +167,6 @@ public class SolrCLI {
return toolExitStatus;
}
- protected void setBasicAuth(CommandLine cli) throws Exception {
- String basicauth = System.getProperty("basicauth", null);
- if (basicauth != null) {
- List<String> ss = StrUtils.splitSmart(basicauth, ':');
- if (ss.size() != 2)
- throw new Exception("Please provide 'basicauth' in the 'user:password' format");
-
- HttpClientUtil.addRequestInterceptor((httpRequest, httpContext) -> {
- String pair = ss.get(0) + ":" + ss.get(1);
- byte[] encodedBytes = Base64.encodeBase64(pair.getBytes(UTF_8));
- httpRequest.addHeader(new BasicHeader("Authorization", "Basic " + new String(encodedBytes, UTF_8)));
- });
- }
- }
-
protected abstract void runImpl(CommandLine cli) throws Exception;
}
/**
@@ -204,9 +193,6 @@ public class SolrCLI {
cloudSolrClient.connect();
runCloudTool(cloudSolrClient, cli);
- } catch (Exception e) {
- log.error("Could not complete mv operation for reason: " + e.getMessage());
- throw (e);
}
}
@@ -541,6 +527,25 @@ public class SolrCLI {
}
return classes;
}
+
+ /**
+ * Inspects system property basicauth and enables authentication for HttpClient
+ * @throws Exception if the basicauth SysProp has wrong format
+ */
+ protected static void setBasicAuth() throws Exception {
+ String basicauth = System.getProperty("basicauth", null);
+ if (basicauth != null) {
+ List<String> ss = StrUtils.splitSmart(basicauth, ':');
+ if (ss.size() != 2)
+ throw new Exception("Please provide 'basicauth' in the 'user:password' format");
+
+ HttpClientUtil.addRequestInterceptor((httpRequest, httpContext) -> {
+ String pair = ss.get(0) + ":" + ss.get(1);
+ byte[] encodedBytes = Base64.encodeBase64(pair.getBytes(UTF_8));
+ httpRequest.addHeader(new BasicHeader("Authorization", "Basic " + new String(encodedBytes, UTF_8)));
+ });
+ }
+ }
/**
* Determine if a request to Solr failed due to a communication error,
@@ -555,6 +560,29 @@ public class SolrCLI {
rootCause instanceof SocketException);
return wasCommError;
}
+
+ /**
+ * Tries a simple HEAD request and throws SolrException in case of Authorization error
+ * @param url the url to do a HEAD request to
+ * @param httpClient the http client to use (make sure it has authentication options set)
+ * @return the HTTP response code
+ * @throws SolrException if auth/autz problems
+ * @throws IOException if connection failure
+ */
+ private static int attemptHttpHead(String url, HttpClient httpClient) throws SolrException, IOException {
+ HttpResponse response = httpClient.execute(new HttpHead(url), HttpClientUtil.createNewHttpClientRequestContext());
+ int code = response.getStatusLine().getStatusCode();
+ if (code == UNAUTHORIZED.code || code == FORBIDDEN.code) {
+ throw new SolrException(SolrException.ErrorCode.getErrorCode(code),
+ "Solr requires authentication for " + url + ". Please supply valid credentials. HTTP code=" + code);
+ }
+ return code;
+ }
+
+ private static boolean exceptionIsAuthRelated(Exception exc) {
+ return (exc instanceof SolrException
+ && Arrays.asList(UNAUTHORIZED.code, FORBIDDEN.code).contains(((SolrException) exc).code()));
+ }
public static CloseableHttpClient getHttpClient() {
ModifiableSolrParams params = new ModifiableSolrParams();
@@ -608,6 +636,9 @@ public class SolrCLI {
try {
json = getJson(httpClient, getUrl);
} catch (Exception exc) {
+ if (exceptionIsAuthRelated(exc)) {
+ throw exc;
+ }
if (--attempts > 0 && checkCommunicationError(exc)) {
if (!isFirstAttempt) // only show the log warning after the second attempt fails
log.warn("Request to "+getUrl+" failed due to: "+exc.getMessage()+
@@ -660,33 +691,46 @@ public class SolrCLI {
*/
@SuppressWarnings({"unchecked"})
public static Map<String,Object> getJson(HttpClient httpClient, String getUrl) throws Exception {
- // ensure we're requesting JSON back from Solr
- HttpGet httpGet = new HttpGet(new URIBuilder(getUrl).setParameter(CommonParams.WT, CommonParams.JSON).build());
- // make the request and get back a parsed JSON object
- Map<String,Object> json = httpClient.execute(httpGet, new SolrResponseHandler(), HttpClientUtil.createNewHttpClientRequestContext());
- // check the response JSON from Solr to see if it is an error
- Long statusCode = asLong("/responseHeader/status", json);
- if (statusCode == -1) {
- throw new SolrServerException("Unable to determine outcome of GET request to: "+
- getUrl+"! Response: "+json);
- } else if (statusCode != 0) {
- String errMsg = asString("/error/msg", json);
- if (errMsg == null)
- errMsg = String.valueOf(json);
- throw new SolrServerException(errMsg);
- } else {
- // make sure no "failure" object in there either
- Object failureObj = json.get("failure");
- if (failureObj != null) {
- if (failureObj instanceof Map) {
- Object err = ((Map)failureObj).get("");
- if (err != null)
- throw new SolrServerException(err.toString());
+ try {
+ // ensure we're requesting JSON back from Solr
+ HttpGet httpGet = new HttpGet(new URIBuilder(getUrl).setParameter(CommonParams.WT, CommonParams.JSON).build());
+
+ // make the request and get back a parsed JSON object
+ Map<String, Object> json = httpClient.execute(httpGet, new SolrResponseHandler(), HttpClientUtil.createNewHttpClientRequestContext());
+ // check the response JSON from Solr to see if it is an error
+ Long statusCode = asLong("/responseHeader/status", json);
+ if (statusCode == -1) {
+ throw new SolrServerException("Unable to determine outcome of GET request to: "+
+ getUrl+"! Response: "+json);
+ } else if (statusCode != 0) {
+ String errMsg = asString("/error/msg", json);
+ if (errMsg == null)
+ errMsg = String.valueOf(json);
+ throw new SolrServerException(errMsg);
+ } else {
+ // make sure no "failure" object in there either
+ Object failureObj = json.get("failure");
+ if (failureObj != null) {
+ if (failureObj instanceof Map) {
+ Object err = ((Map)failureObj).get("");
+ if (err != null)
+ throw new SolrServerException(err.toString());
+ }
+ throw new SolrServerException(failureObj.toString());
}
- throw new SolrServerException(failureObj.toString());
+ }
+ return json;
+ } catch (ClientProtocolException cpe) {
+ // Currently detecting authentication by string-matching the HTTP response
+ // Perhaps SolrClient should have thrown an exception itself??
+ if (cpe.getMessage().contains("HTTP ERROR 401") || cpe.getMessage().contentEquals("HTTP ERROR 403")) {
+ int code = cpe.getMessage().contains("HTTP ERROR 401") ? 401 : 403;
+ throw new SolrException(SolrException.ErrorCode.getErrorCode(code),
+ "Solr requires authentication for " + getUrl + ". Please supply valid credentials. HTTP code=" + code);
+ } else {
+ throw cpe;
}
}
- return json;
}
/**
@@ -820,6 +864,9 @@ public class SolrCLI {
new JSONWriter(arr, 2).write(getStatus(solrUrl));
echo(arr.toString());
} catch (Exception exc) {
+ if (exceptionIsAuthRelated(exc)) {
+ throw exc;
+ }
if (checkCommunicationError(exc)) {
// this is not actually an error from the tool as it's ok if Solr is not online.
System.err.println("Solr at "+solrUrl+" not online.");
@@ -836,6 +883,9 @@ public class SolrCLI {
try {
return getStatus(solrUrl);
} catch (Exception exc) {
+ if (exceptionIsAuthRelated(exc)) {
+ throw exc;
+ }
try {
Thread.sleep(2000L);
} catch (InterruptedException interrupted) {
@@ -1130,6 +1180,10 @@ public class SolrCLI {
if (slices == null)
throw new IllegalArgumentException("Collection "+collection+" not found!");
+ // Test http code using a HEAD request first, fail fast if authentication failure
+ String urlForColl = zkStateReader.getLeaderUrl(collection, slices.stream().findFirst().get().getName(), 1000);
+ attemptHttpHead(urlForColl, cloudSolrClient.getHttpClient());
+
SolrQuery q = new SolrQuery("*:*");
q.setRows(0);
QueryResponse qr = cloudSolrClient.query(q);
@@ -3150,7 +3204,8 @@ public class SolrCLI {
private static String message = null;
private static boolean useExitCode = false;
-
+ private static Optional<Long> timeoutMs = Optional.empty();
+
public AssertTool() { this(System.out); }
public AssertTool(PrintStream stdout) { super(stdout); }
@@ -3170,13 +3225,13 @@ public class SolrCLI {
.withLongOpt("root")
.create("r"),
OptionBuilder
- .withDescription("Asserts that Solr is NOT started on a certain URL")
+ .withDescription("Asserts that Solr is NOT running on a certain URL. Default timeout is 1000ms")
.withLongOpt("not-started")
.hasArg(true)
.withArgName("url")
.create("S"),
OptionBuilder
- .withDescription("Asserts that Solr is started on a certain URL")
+ .withDescription("Asserts that Solr is running on a certain URL. Default timeout is 1000ms")
.withLongOpt("started")
.hasArg(true)
.withArgName("url")
@@ -3206,6 +3261,13 @@ public class SolrCLI {
.withArgName("message")
.create("m"),
OptionBuilder
+ .withDescription("Timeout in ms for commands supporting a timeout")
+ .withLongOpt("ms")
+ .hasArg(true)
+ .withType(Long.class)
+ .withArgName("ms")
+ .create("t"),
+ OptionBuilder
.withDescription("Return an exit code instead of printing error message on assert fail.")
.withLongOpt("exitcode")
.create("e")
@@ -3217,14 +3279,14 @@ public class SolrCLI {
int toolExitStatus = 0;
try {
- setBasicAuth(cli);
+ setBasicAuth();
toolExitStatus = runAssert(cli);
} catch (Exception exc) {
// since this is a CLI, spare the user the stacktrace
String excMsg = exc.getMessage();
if (excMsg != null) {
System.err.println("\nERROR: " + excMsg + "\n");
- toolExitStatus = 1;
+ toolExitStatus = 100; // Exit >= 100 means error, else means number of tests that failed
} else {
throw exc;
}
@@ -3237,7 +3299,12 @@ public class SolrCLI {
runAssert(cli);
}
- // Custom run method which may return exit code
+ /**
+ * Custom run method which may return exit code
+ * @param cli the command line object
+ * @return 0 on success, or a number corresponding to number of tests that failed
+ * @throws Exception if a tool failed, e.g. authentication failure
+ */
protected int runAssert(CommandLine cli) throws Exception {
if (cli.getOptions().length == 0 || cli.getArgs().length > 0 || cli.hasOption("h")) {
new HelpFormatter().printHelp("bin/solr assert [-m <message>] [-e] [-rR] [-s <url>] [-S <url>] [-u <dir>] [-x <dir>] [-X <dir>]", getToolOptions(this));
@@ -3246,49 +3313,79 @@ public class SolrCLI {
if (cli.hasOption("m")) {
message = cli.getOptionValue("m");
}
+ if (cli.hasOption("t")) {
+ timeoutMs = Optional.of(Long.parseLong(cli.getOptionValue("t")));
+ }
if (cli.hasOption("e")) {
useExitCode = true;
}
+
+ int ret = 0;
if (cli.hasOption("r")) {
- if (assertRootUser() > 0) return 1;
+ ret += assertRootUser();
}
if (cli.hasOption("R")) {
- if (assertNotRootUser() > 0) return 1;
+ ret += assertNotRootUser();
}
if (cli.hasOption("x")) {
- if (assertFileExists(cli.getOptionValue("x")) > 0) return 1;
+ ret += assertFileExists(cli.getOptionValue("x"));
}
if (cli.hasOption("X")) {
- if (assertFileNotExists(cli.getOptionValue("X")) > 0) return 1;
+ ret += assertFileNotExists(cli.getOptionValue("X"));
}
if (cli.hasOption("u")) {
- if (sameUser(cli.getOptionValue("u")) > 0) return 1;
+ ret += sameUser(cli.getOptionValue("u"));
}
if (cli.hasOption("s")) {
- if (assertSolrRunning(cli.getOptionValue("s")) > 0) return 1;
+ ret += assertSolrRunning(cli.getOptionValue("s"));
}
if (cli.hasOption("S")) {
- if (assertSolrNotRunning(cli.getOptionValue("S")) > 0) return 1;
+ ret += assertSolrNotRunning(cli.getOptionValue("S"));
}
- return 0;
+ return ret;
}
public static int assertSolrRunning(String url) throws Exception {
StatusTool status = new StatusTool();
try {
- status.waitToSeeSolrUp(url, 5);
- } catch (Exception e) {
- return exitOrException("Solr is not running on url " + url);
+ status.waitToSeeSolrUp(url, timeoutMs.orElse(1000L).intValue() / 1000);
+ } catch (Exception se) {
+ if (exceptionIsAuthRelated(se)) {
+ throw se;
+ }
+ return exitOrException("Solr is not running on url " + url + " after " + timeoutMs.orElse(1000L) / 1000 + "s");
}
return 0;
}
public static int assertSolrNotRunning(String url) throws Exception {
StatusTool status = new StatusTool();
+ long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutMs.orElse(1000L), TimeUnit.MILLISECONDS);
try {
- status.waitToSeeSolrUp(url, 5);
- return exitOrException("Solr is running on url " + url);
- } catch (Exception e) { return 0; }
+ attemptHttpHead(url, getHttpClient());
+ } catch (SolrException se) {
+ throw se; // Auth error
+ } catch (IOException e) {
+ log.debug("Opening connection to " + url + " failed, Solr does not seem to be running", e);
+ return 0;
+ }
+ while (System.nanoTime() < timeout) {
+ try {
+ status.waitToSeeSolrUp(url, 1);
+ try {
+ log.debug("Solr still up. Waiting before trying again to see if it was stopped");
+ Thread.sleep(1000L);
+ } catch (InterruptedException interrupted) {
+ timeout = 0; // stop looping
+ }
+ } catch (Exception se) {
+ if (exceptionIsAuthRelated(se)) {
+ throw se;
+ }
+ return exitOrException(se.getMessage());
+ }
+ }
+ return exitOrException("Solr is still running at " + url + " after " + timeoutMs.orElse(1000L) / 1000 + "s");
}
public static int sameUser(String directory) throws Exception {
[42/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9559: Add
ExecutorStream to execute stored Streaming Expressions
Posted by kr...@apache.org.
SOLR-9559: Add ExecutorStream to execute stored Streaming Expressions
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/727bf559
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/727bf559
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/727bf559
Branch: refs/heads/jira/solr-8593
Commit: 727bf559a0089d67ddd7eb5ed572f79b67a006c6
Parents: 2ee7231
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 26 17:39:59 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 26 20:19:40 2016 -0400
----------------------------------------------------------------------
.../org/apache/solr/handler/StreamHandler.java | 32 +--
.../client/solrj/io/stream/ExecutorStream.java | 224 +++++++++++++++++++
.../solrj/io/stream/StreamExpressionTest.java | 152 ++++++++++++-
3 files changed, 372 insertions(+), 36 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/727bf559/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 3e841bd..c6f3c62 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -35,36 +35,7 @@ import org.apache.solr.client.solrj.io.ops.ConcatOperation;
import org.apache.solr.client.solrj.io.ops.DistinctOperation;
import org.apache.solr.client.solrj.io.ops.GroupOperation;
import org.apache.solr.client.solrj.io.ops.ReplaceOperation;
-import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
-import org.apache.solr.client.solrj.io.stream.CommitStream;
-import org.apache.solr.client.solrj.io.stream.ComplementStream;
-import org.apache.solr.client.solrj.io.stream.DaemonStream;
-import org.apache.solr.client.solrj.io.stream.ExceptionStream;
-import org.apache.solr.client.solrj.io.stream.FacetStream;
-import org.apache.solr.client.solrj.io.stream.FeaturesSelectionStream;
-import org.apache.solr.client.solrj.io.stream.FetchStream;
-import org.apache.solr.client.solrj.io.stream.HashJoinStream;
-import org.apache.solr.client.solrj.io.stream.InnerJoinStream;
-import org.apache.solr.client.solrj.io.stream.IntersectStream;
-import org.apache.solr.client.solrj.io.stream.JDBCStream;
-import org.apache.solr.client.solrj.io.stream.LeftOuterJoinStream;
-import org.apache.solr.client.solrj.io.stream.MergeStream;
-import org.apache.solr.client.solrj.io.stream.ModelStream;
-import org.apache.solr.client.solrj.io.stream.OuterHashJoinStream;
-import org.apache.solr.client.solrj.io.stream.ParallelStream;
-import org.apache.solr.client.solrj.io.stream.RankStream;
-import org.apache.solr.client.solrj.io.stream.ReducerStream;
-import org.apache.solr.client.solrj.io.stream.RollupStream;
-import org.apache.solr.client.solrj.io.stream.ScoreNodesStream;
-import org.apache.solr.client.solrj.io.stream.SelectStream;
-import org.apache.solr.client.solrj.io.stream.SortStream;
-import org.apache.solr.client.solrj.io.stream.StatsStream;
-import org.apache.solr.client.solrj.io.stream.StreamContext;
-import org.apache.solr.client.solrj.io.stream.TextLogitStream;
-import org.apache.solr.client.solrj.io.stream.TopicStream;
-import org.apache.solr.client.solrj.io.stream.TupleStream;
-import org.apache.solr.client.solrj.io.stream.UniqueStream;
-import org.apache.solr.client.solrj.io.stream.UpdateStream;
+import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
@@ -167,6 +138,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("model", ModelStream.class)
.withFunctionName("classify", ClassifyStream.class)
.withFunctionName("fetch", FetchStream.class)
+ .withFunctionName("executor", ExecutorStream.class)
// metrics
.withFunctionName("min", MinMetric.class)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/727bf559/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
new file mode 100644
index 0000000..6765f72
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.SolrjNamedThreadFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The executor function wraps a stream with Tuples containing Streaming Expressions
+ * and executes them in parallel. Sample syntax:
+ *
+ * executor(thread=10, topic(storedExpressions, q="*:*", fl="expr_s, id", id="topic1"))
+ *
+ * The Streaming Expression to execute is taken from the expr field in the Tuples.
+ */
+
+public class ExecutorStream extends TupleStream implements Expressible {
+
+ private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private TupleStream stream;
+
+ private int threads;
+
+ private ExecutorService executorService;
+ private StreamFactory streamFactory;
+ private StreamContext streamContext;
+
+ public ExecutorStream(StreamExpression expression, StreamFactory factory) throws IOException {
+ // grab all parameters out
+ List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+ StreamExpressionNamedParameter threadsParam = factory.getNamedOperand(expression, "threads");
+
+ int threads = 6;
+
+ if(threadsParam != null) {
+ threads = Integer.parseInt(((StreamExpressionValue)threadsParam.getParameter()).getValue());
+ }
+
+ if(1 != streamExpressions.size()){
+ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single stream but found %d",expression, streamExpressions.size()));
+ }
+
+ TupleStream stream = factory.constructStream(streamExpressions.get(0));
+ init(stream, threads, factory);
+ }
+
+ private void init(TupleStream tupleStream, int threads, StreamFactory factory) throws IOException{
+ this.threads = threads;
+ this.stream = tupleStream;
+ this.streamFactory = factory;
+ }
+
+ @Override
+ public StreamExpression toExpression(StreamFactory factory) throws IOException {
+ return toExpression(factory, true);
+ }
+
+ private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+
+ // function name
+ StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+ expression.addParameter(new StreamExpressionNamedParameter("threads", Integer.toString(threads)));
+
+ // stream
+ if(includeStreams) {
+ if (stream instanceof Expressible) {
+ expression.addParameter(((Expressible) stream).toExpression(factory));
+ } else {
+ throw new IOException("The ExecuteStream contains a non-expressible TupleStream - it cannot be converted to an expression");
+ }
+ }
+
+ return expression;
+ }
+
+ @Override
+ public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+ return new StreamExplanation(getStreamNodeId().toString())
+ .withChildren(new Explanation[]{
+ stream.toExplanation(factory)
+ })
+ .withFunctionName(factory.getFunctionName(this.getClass()))
+ .withImplementingClass(this.getClass().getName())
+ .withExpressionType(ExpressionType.STREAM_DECORATOR)
+ .withExpression(toExpression(factory, false).toString());
+ }
+
+ public void setStreamContext(StreamContext streamContext) {
+ this.streamContext = streamContext;
+ this.stream.setStreamContext(streamContext);
+ }
+
+ public List<TupleStream> children() {
+ List<TupleStream> l = new ArrayList();
+ l.add(stream);
+ return l;
+ }
+
+ public void open() throws IOException {
+ executorService = ExecutorUtil.newMDCAwareFixedThreadPool(threads, new SolrjNamedThreadFactory("ExecutorStream"));
+ stream.open();
+ }
+
+ public void close() throws IOException {
+ stream.close();
+ executorService.shutdown();
+ try {
+ executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
+ } catch(InterruptedException e) {
+ logger.error("Interrupted while waiting for termination", e);
+ }
+ }
+
+ public Tuple read() throws IOException {
+ ArrayBlockingQueue<Tuple> queue = new ArrayBlockingQueue(10000);
+ while(true) {
+ Tuple tuple = stream.read();
+ if (!tuple.EOF) {
+ try {
+ queue.put(tuple);
+ } catch (InterruptedException e) {
+ throw new IOException(e);
+ }
+ executorService.execute(new StreamTask(queue, streamFactory, streamContext));
+ } else {
+ return tuple;
+ }
+ }
+ }
+
+ public StreamComparator getStreamSort(){
+ return stream.getStreamSort();
+ }
+
+ public int getCost() {
+ return 0;
+ }
+
+ public static class StreamTask implements Runnable {
+
+ private ArrayBlockingQueue<Tuple> queue;
+ private StreamFactory streamFactory;
+ private StreamContext streamContext;
+
+ public StreamTask(ArrayBlockingQueue queue, StreamFactory streamFactory, StreamContext streamContext) {
+ this.queue = queue;
+ this.streamFactory = streamFactory;
+ this.streamContext = new StreamContext();
+ this.streamContext.setSolrClientCache(streamContext.getSolrClientCache());
+ this.streamContext.setModelCache(streamContext.getModelCache());
+ }
+
+ public void run() {
+ Tuple tuple = null;
+ try {
+ tuple = queue.take();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+
+ String expr = tuple.getString("expr_s");
+ Object id = tuple.get("id");
+ TupleStream stream = null;
+
+ try {
+ stream = streamFactory.constructStream(expr);
+ stream.setStreamContext(streamContext);
+ stream.open();
+ while (true) {
+ Tuple t = stream.read();
+ if (t.EOF) {
+ break;
+ }
+ }
+ } catch (Exception e) {
+ logger.error("Executor Error: id="+id+" expr_s="+expr, e);
+ } finally {
+ try {
+ stream.close();
+ } catch (Exception e1) {
+ logger.error("Executor Error", e1);
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/727bf559/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 7b5777d..106368e 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -256,7 +256,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
tuples = getTuples(stream);
assertEquals(4, tuples.size());
- assertOrder(tuples, 4,3,1,0);
+ assertOrder(tuples, 4, 3, 1, 0);
// Basic w/ multi comp
sParams.set("q2", "search(" + COLLECTION + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})");
@@ -522,14 +522,14 @@ public class StreamExpressionTest extends SolrCloudTestCase {
// Basic test
expression = StreamExpressionParser.parse("top("
- + "n=3,"
- + "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
- + "sort=\"a_f asc, a_i asc\")");
+ + "n=3,"
+ + "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
+ + "sort=\"a_f asc, a_i asc\")");
stream = new RankStream(expression, factory);
tuples = getTuples(stream);
assert(tuples.size() == 3);
- assertOrder(tuples, 0,2,1);
+ assertOrder(tuples, 0, 2, 1);
// Basic test desc
expression = StreamExpressionParser.parse("top("
@@ -3794,7 +3794,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", expr);
- paramsLoc.set("qt","/stream");
+ paramsLoc.set("qt", "/stream");
SolrStream classifyStream = new SolrStream(url, paramsLoc);
Map<String, Double> idToLabel = getIdToLabel(classifyStream, "probability_d");
assertEquals(idToLabel.size(), 2);
@@ -3866,6 +3866,146 @@ public class StreamExpressionTest extends SolrCloudTestCase {
CollectionAdminRequest.deleteCollection("checkpointCollection").process(cluster.getSolrClient());
}
+ @Test
+ public void testExecutorStream() throws Exception {
+ CollectionAdminRequest.createCollection("workQueue", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("workQueue", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+ CollectionAdminRequest.createCollection("mainCorpus", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("mainCorpus", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+ CollectionAdminRequest.createCollection("destination", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("destination", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+
+ UpdateRequest workRequest = new UpdateRequest();
+ UpdateRequest dataRequest = new UpdateRequest();
+
+
+ for (int i = 0; i < 500; i++) {
+ workRequest.add(id, String.valueOf(i), "expr_s", "update(destination, batchSize=50, search(mainCorpus, q=id:"+i+", rows=1, sort=\"id asc\", fl=\"id, body_t, field_i\"))");
+ dataRequest.add(id, String.valueOf(i), "body_t", "hello world "+i, "field_i", Integer.toString(i));
+ }
+
+ workRequest.commit(cluster.getSolrClient(), "workQueue");
+ dataRequest.commit(cluster.getSolrClient(), "mainCorpus");
+
+ String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/destination";
+ TupleStream executorStream;
+ ModifiableSolrParams paramsLoc;
+
+ StreamFactory factory = new StreamFactory()
+ .withCollectionZkHost("workQueue", cluster.getZkServer().getZkAddress())
+ .withCollectionZkHost("mainCorpus", cluster.getZkServer().getZkAddress())
+ .withCollectionZkHost("destination", cluster.getZkServer().getZkAddress())
+ .withFunctionName("search", CloudSolrStream.class)
+ .withFunctionName("executor", ExecutorStream.class)
+ .withFunctionName("update", UpdateStream.class);
+
+ String executorExpression = "executor(threads=3, search(workQueue, q=\"*:*\", fl=\"id, expr_s\", rows=1000, sort=\"id desc\"))";
+ executorStream = factory.constructStream(executorExpression);
+
+ StreamContext context = new StreamContext();
+ SolrClientCache clientCache = new SolrClientCache();
+ context.setSolrClientCache(clientCache);
+ executorStream.setStreamContext(context);
+ getTuples(executorStream);
+ //Destination collection should now contain all the records in the main corpus.
+ cluster.getSolrClient().commit("destination");
+ paramsLoc = new ModifiableSolrParams();
+ paramsLoc.set("expr", "search(destination, q=\"*:*\", fl=\"id, body_t, field_i\", rows=1000, sort=\"field_i asc\")");
+ paramsLoc.set("qt","/stream");
+
+ SolrStream solrStream = new SolrStream(url, paramsLoc);
+ List<Tuple> tuples = getTuples(solrStream);
+ assertTrue(tuples.size() == 500);
+ for(int i=0; i<500; i++) {
+ Tuple tuple = tuples.get(i);
+ long ivalue = tuple.getLong("field_i");
+ String body = tuple.getString("body_t");
+ assertTrue(ivalue == i);
+ assertTrue(body.equals("hello world "+i));
+ }
+
+ solrStream.close();
+ clientCache.close();
+ CollectionAdminRequest.deleteCollection("workQueue").process(cluster.getSolrClient());
+ CollectionAdminRequest.deleteCollection("mainCorpus").process(cluster.getSolrClient());
+ CollectionAdminRequest.deleteCollection("destination").process(cluster.getSolrClient());
+ }
+
+
+ @Test
+ public void testParallelExecutorStream() throws Exception {
+ CollectionAdminRequest.createCollection("workQueue", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("workQueue", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+ CollectionAdminRequest.createCollection("mainCorpus", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("mainCorpus", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+ CollectionAdminRequest.createCollection("destination", "conf", 2, 1).process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish("destination", cluster.getSolrClient().getZkStateReader(),
+ false, true, TIMEOUT);
+
+ UpdateRequest workRequest = new UpdateRequest();
+ UpdateRequest dataRequest = new UpdateRequest();
+
+
+ for (int i = 0; i < 500; i++) {
+ workRequest.add(id, String.valueOf(i), "expr_s", "update(destination, batchSize=50, search(mainCorpus, q=id:"+i+", rows=1, sort=\"id asc\", fl=\"id, body_t, field_i\"))");
+ dataRequest.add(id, String.valueOf(i), "body_t", "hello world "+i, "field_i", Integer.toString(i));
+ }
+
+ workRequest.commit(cluster.getSolrClient(), "workQueue");
+ dataRequest.commit(cluster.getSolrClient(), "mainCorpus");
+
+ String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/destination";
+ TupleStream executorStream;
+ ModifiableSolrParams paramsLoc;
+
+ StreamFactory factory = new StreamFactory()
+ .withCollectionZkHost("workQueue", cluster.getZkServer().getZkAddress())
+ .withCollectionZkHost("mainCorpus", cluster.getZkServer().getZkAddress())
+ .withCollectionZkHost("destination", cluster.getZkServer().getZkAddress())
+ .withFunctionName("search", CloudSolrStream.class)
+ .withFunctionName("executor", ExecutorStream.class)
+ .withFunctionName("parallel", ParallelStream.class)
+ .withFunctionName("update", UpdateStream.class);
+
+ String executorExpression = "parallel(workQueue, workers=2, sort=\"EOF asc\", executor(threads=3, queueSize=100, search(workQueue, q=\"*:*\", fl=\"id, expr_s\", rows=1000, partitionKeys=id, sort=\"id desc\")))";
+ executorStream = factory.constructStream(executorExpression);
+
+ StreamContext context = new StreamContext();
+ SolrClientCache clientCache = new SolrClientCache();
+ context.setSolrClientCache(clientCache);
+ executorStream.setStreamContext(context);
+ getTuples(executorStream);
+ //Destination collection should now contain all the records in the main corpus.
+ cluster.getSolrClient().commit("destination");
+ paramsLoc = new ModifiableSolrParams();
+ paramsLoc.set("expr", "search(destination, q=\"*:*\", fl=\"id, body_t, field_i\", rows=1000, sort=\"field_i asc\")");
+ paramsLoc.set("qt","/stream");
+
+ SolrStream solrStream = new SolrStream(url, paramsLoc);
+ List<Tuple> tuples = getTuples(solrStream);
+ assertTrue(tuples.size() == 500);
+ for(int i=0; i<500; i++) {
+ Tuple tuple = tuples.get(i);
+ long ivalue = tuple.getLong("field_i");
+ String body = tuple.getString("body_t");
+ assertTrue(ivalue == i);
+ assertTrue(body.equals("hello world " + i));
+ }
+
+ solrStream.close();
+ clientCache.close();
+ CollectionAdminRequest.deleteCollection("workQueue").process(cluster.getSolrClient());
+ CollectionAdminRequest.deleteCollection("mainCorpus").process(cluster.getSolrClient());
+ CollectionAdminRequest.deleteCollection("destination").process(cluster.getSolrClient());
+ }
+
+
+
private Map<String,Double> getIdToLabel(TupleStream stream, String outField) throws IOException {
Map<String, Double> idToLabel = new HashMap<>();
List<Tuple> tuples = getTuples(stream);
[47/50] [abbrv] lucene-solr:jira/solr-8593: general cleanup to use
java 8 features
Posted by kr...@apache.org.
general cleanup to use java 8 features
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0feca1a9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0feca1a9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0feca1a9
Branch: refs/heads/jira/solr-8593
Commit: 0feca1a9742a5c945024e6e2bbc9098a003ab538
Parents: d6b6e74
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 27 15:03:05 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 27 15:05:07 2016 +0530
----------------------------------------------------------------------
.../solr/handler/dataimport/DIHCache.java | 16 ++++----
.../solr/handler/dataimport/EventListener.java | 2 +-
.../dataimport/XPathEntityProcessor.java | 40 ++++++++------------
.../handler/dataimport/XPathRecordReader.java | 15 +++-----
.../AbstractDataImportHandlerTestCase.java | 14 ++-----
.../dataimport/TestXPathRecordReader.java | 15 +++-----
6 files changed, 38 insertions(+), 64 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
index 7e72d4e..a67b3e4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
@@ -35,7 +35,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
* includes any parameters needed by the cache impl. This must be called
* before any read/write operations are permitted.
*/
- public void open(Context context);
+ void open(Context context);
/**
* <p>
@@ -43,14 +43,14 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
* but not destroyed.
* </p>
*/
- public void close();
+ void close();
/**
* <p>
* Persists any pending data to the cache
* </p>
*/
- public void flush();
+ void flush();
/**
* <p>
@@ -67,7 +67,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
* update a key's documents, first call delete(Object key).
* </p>
*/
- public void add(Map<String,Object> rec);
+ void add(Map<String, Object> rec);
/**
* <p>
@@ -76,7 +76,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
* </p>
*/
@Override
- public Iterator<Map<String,Object>> iterator();
+ Iterator<Map<String,Object>> iterator();
/**
* <p>
@@ -84,20 +84,20 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
* match the given key in insertion order.
* </p>
*/
- public Iterator<Map<String,Object>> iterator(Object key);
+ Iterator<Map<String,Object>> iterator(Object key);
/**
* <p>
* Delete all documents associated with the given key
* </p>
*/
- public void delete(Object key);
+ void delete(Object key);
/**
* <p>
* Delete all data from the cache,leaving the empty cache intact.
* </p>
*/
- public void deleteAll();
+ void deleteAll();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
index 0f22513..0c43a0b 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
@@ -30,6 +30,6 @@ public interface EventListener {
*
* @param ctx the Context in which this event was called
*/
- public void onEvent(Context ctx);
+ void onEvent(Context ctx);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index cc28dc4..70b9aba 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -323,13 +323,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
rowIterator = getRowIterator(data, s);
} else {
try {
- xpathReader.streamRecords(data, new XPathRecordReader.Handler() {
- @Override
- @SuppressWarnings("unchecked")
- public void handle(Map<String, Object> record, String xpath) {
- rows.add(readRow(record, xpath));
- }
- });
+ xpathReader.streamRecords(data, (record, xpath) -> rows.add(readRow(record, xpath)));
} catch (Exception e) {
String msg = "Parsing failed for xml, url:" + s + " rows processed:" + rows.size();
if (rows.size() > 0) msg += " last row: " + rows.get(rows.size() - 1);
@@ -433,25 +427,21 @@ public class XPathEntityProcessor extends EntityProcessorBase {
@Override
public void run() {
try {
- xpathReader.streamRecords(data, new XPathRecordReader.Handler() {
- @Override
- @SuppressWarnings("unchecked")
- public void handle(Map<String, Object> record, String xpath) {
- if (isEnd.get()) {
- throwExp.set(false);
- //To end the streaming . otherwise the parsing will go on forever
- //though consumer has gone away
- throw new RuntimeException("BREAK");
- }
- Map<String, Object> row;
- try {
- row = readRow(record, xpath);
- } catch (Exception e) {
- isEnd.set(true);
- return;
- }
- offer(row);
+ xpathReader.streamRecords(data, (record, xpath) -> {
+ if (isEnd.get()) {
+ throwExp.set(false);
+ //To end the streaming . otherwise the parsing will go on forever
+ //though consumer has gone away
+ throw new RuntimeException("BREAK");
+ }
+ Map<String, Object> row;
+ try {
+ row = readRow(record, xpath);
+ } catch (Exception e) {
+ isEnd.set(true);
+ return;
}
+ offer(row);
});
} catch (Exception e) {
if(throwExp.get()) exp.set(e);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
index 266e047..69c6c57 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
@@ -162,12 +162,7 @@ public class XPathRecordReader {
*/
public List<Map<String, Object>> getAllRecords(Reader r) {
final List<Map<String, Object>> results = new ArrayList<>();
- streamRecords(r, new Handler() {
- @Override
- public void handle(Map<String, Object> record, String s) {
- results.add(record);
- }
- });
+ streamRecords(r, (record, s) -> results.add(record));
return results;
}
@@ -182,8 +177,8 @@ public class XPathRecordReader {
public void streamRecords(Reader r, Handler handler) {
try {
XMLStreamReader parser = factory.createXMLStreamReader(r);
- rootNode.parse(parser, handler, new HashMap<String, Object>(),
- new Stack<Set<String>>(), false);
+ rootNode.parse(parser, handler, new HashMap<>(),
+ new Stack<>(), false);
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -657,7 +652,7 @@ public class XPathRecordReader {
/**Implement this interface to stream records as and when one is found.
*
*/
- public static interface Handler {
+ public interface Handler {
/**
* @param record The record map. The key is the field name as provided in
* the addField() methods. The value can be a single String (for single
@@ -666,7 +661,7 @@ public class XPathRecordReader {
* If there is any change all parsing will be aborted and the Exception
* is propagated up
*/
- public void handle(Map<String, Object> record, String xpath);
+ void handle(Map<String, Object> record, String xpath);
}
private static final Pattern ATTRIB_PRESENT_WITHVAL = Pattern
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
index 60f0811..db02993 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
@@ -20,7 +20,6 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
-import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -28,6 +27,7 @@ import org.apache.commons.io.FileUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SuppressForbidden;
+import org.apache.solr.common.util.Utils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
@@ -145,7 +145,7 @@ public abstract class AbstractDataImportHandlerTestCase extends
if (resolver == null) resolver = new VariableResolver();
final Context delegate = new ContextImpl(parent, resolver,
parentDataSource, currProcess,
- new HashMap<String, Object>(), null, null);
+ new HashMap<>(), null, null);
return new TestContext(entityAttrs, delegate, entityFields, parent == null);
}
@@ -155,15 +155,7 @@ public abstract class AbstractDataImportHandlerTestCase extends
*/
@SuppressWarnings("unchecked")
public static Map createMap(Object... args) {
- Map result = new LinkedHashMap();
-
- if (args == null || args.length == 0)
- return result;
-
- for (int i = 0; i < args.length - 1; i += 2)
- result.put(args[i], args[i + 1]);
-
- return result;
+ return Utils.makeMap(args);
}
@SuppressForbidden(reason = "Needs currentTimeMillis to set modified time for a file")
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
index 5f9a5ca..d8e3cbe 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
@@ -16,13 +16,13 @@
*/
package org.apache.solr.handler.dataimport;
-import org.junit.Test;
-
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import org.junit.Test;
+
/**
* <p> Test for XPathRecordReader </p>
*
@@ -138,13 +138,10 @@ public class TestXPathRecordReader extends AbstractDataImportHandlerTestCase {
final List<Map<String, Object>> a = new ArrayList<>();
final List<Map<String, Object>> x = new ArrayList<>();
- rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() {
- @Override
- public void handle(Map<String, Object> record, String xpath) {
- if (record == null) return;
- if (xpath.equals("/root/a")) a.add(record);
- if (xpath.equals("/root/x")) x.add(record);
- }
+ rr.streamRecords(new StringReader(xml), (record, xpath) -> {
+ if (record == null) return;
+ if (xpath.equals("/root/a")) a.add(record);
+ if (xpath.equals("/root/x")) x.add(record);
});
assertEquals(1, a.size());
[16/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7519: add
optimized implementations for browse-only facets
Posted by kr...@apache.org.
LUCENE-7519: add optimized implementations for browse-only facets
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0782b095
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0782b095
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0782b095
Branch: refs/heads/jira/solr-8593
Commit: 0782b09571fc5ac3e92b566f9abc047b2bd7966c
Parents: b7aa582
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Oct 25 06:22:23 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Oct 25 06:22:23 2016 -0400
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +
.../DefaultSortedSetDocValuesReaderState.java | 3 +-
.../SortedSetDocValuesFacetCounts.java | 124 +++++++++++++++----
.../facet/taxonomy/FastTaxonomyFacetCounts.java | 49 ++++++++
.../lucene/facet/taxonomy/TaxonomyFacets.java | 4 +-
.../sortedset/TestSortedSetDocValuesFacets.java | 25 ++--
.../facet/taxonomy/TestTaxonomyFacetCounts.java | 84 +++++--------
7 files changed, 202 insertions(+), 90 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 954137f..d574a8a 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -45,6 +45,9 @@ Optimizations
that have a facet value, so sparse faceting works as expected
(Adrien Grand via Mike McCandless)
+* LUCENE-7519: Add optimized APIs to compute browse-only top level
+ facets (Mike McCandless)
+
Other
* LUCENE-7328: Remove LegacyNumericEncoding from GeoPointField. (Nick Knize)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
index 7bbe94a..b959d25 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
@@ -36,7 +36,8 @@ import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
/**
- * Default implementation of {@link SortedSetDocValuesFacetCounts}
+ * Default implementation of {@link SortedSetDocValuesFacetCounts}. You must ensure the original
+ * {@link IndexReader} passed to the constructor is not closed whenever you use this class!
*/
public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesReaderState {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
index 4fff6a6..9ba8547 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
@@ -18,6 +18,7 @@ package org.apache.lucene.facet.sortedset;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@@ -33,11 +34,15 @@ import org.apache.lucene.facet.TopOrdAndIntQueue;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState.OrdRange;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
@@ -77,6 +82,17 @@ public class SortedSetDocValuesFacetCounts extends Facets {
count(hits.getMatchingDocs());
}
+ /** Returns all facet counts, same result as searching on {@link MatchAllDocsQuery} but faster. */
+ public SortedSetDocValuesFacetCounts(SortedSetDocValuesReaderState state)
+ throws IOException {
+ this.state = state;
+ this.field = state.getField();
+ dv = state.getDocValues();
+ counts = new int[state.getSize()];
+ //System.out.println("field=" + field);
+ countAll();
+ }
+
@Override
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
@@ -176,7 +192,8 @@ public class SortedSetDocValuesFacetCounts extends Facets {
continue;
}
- DocIdSetIterator docs = hits.bits.iterator();
+ DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(
+ hits.bits.iterator(), segValues));
// TODO: yet another option is to count all segs
// first, only in seg-ord space, and then do a
@@ -196,16 +213,12 @@ public class SortedSetDocValuesFacetCounts extends Facets {
if (hits.totalHits < numSegOrds/10) {
//System.out.println(" remap as-we-go");
// Remap every ord to global ord as we iterate:
- int doc;
- while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- //System.out.println(" doc=" + doc);
- if (segValues.advanceExact(doc)) {
- int term = (int) segValues.nextOrd();
- while (term != SortedSetDocValues.NO_MORE_ORDS) {
- //System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
- counts[(int) ordMap.get(term)]++;
- term = (int) segValues.nextOrd();
- }
+ for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
+ int term = (int) segValues.nextOrd();
+ while (term != SortedSetDocValues.NO_MORE_ORDS) {
+ //System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
+ counts[(int) ordMap.get(term)]++;
+ term = (int) segValues.nextOrd();
}
}
} else {
@@ -213,16 +226,12 @@ public class SortedSetDocValuesFacetCounts extends Facets {
// First count in seg-ord space:
final int[] segCounts = new int[numSegOrds];
- int doc;
- while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- //System.out.println(" doc=" + doc);
- if (segValues.advanceExact(doc)) {
- int term = (int) segValues.nextOrd();
- while (term != SortedSetDocValues.NO_MORE_ORDS) {
- //System.out.println(" ord=" + term);
- segCounts[term]++;
- term = (int) segValues.nextOrd();
- }
+ for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
+ int term = (int) segValues.nextOrd();
+ while (term != SortedSetDocValues.NO_MORE_ORDS) {
+ //System.out.println(" ord=" + term);
+ segCounts[term]++;
+ term = (int) segValues.nextOrd();
}
}
@@ -238,9 +247,76 @@ public class SortedSetDocValuesFacetCounts extends Facets {
} else {
// No ord mapping (e.g., single segment index):
// just aggregate directly into counts:
- int doc;
- while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (segValues.advanceExact(doc)) {
+ for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
+ int term = (int) segValues.nextOrd();
+ while (term != SortedSetDocValues.NO_MORE_ORDS) {
+ counts[term]++;
+ term = (int) segValues.nextOrd();
+ }
+ }
+ }
+ }
+ }
+
+ /** Does all the "real work" of tallying up the counts. */
+ private final void countAll() throws IOException {
+ //System.out.println("ssdv count");
+
+ MultiDocValues.OrdinalMap ordinalMap;
+
+ // TODO: is this right? really, we need a way to
+ // verify that this ordinalMap "matches" the leaves in
+ // matchingDocs...
+ if (dv instanceof MultiDocValues.MultiSortedSetDocValues) {
+ ordinalMap = ((MultiSortedSetDocValues) dv).mapping;
+ } else {
+ ordinalMap = null;
+ }
+
+ IndexReader origReader = state.getOrigReader();
+
+ for(LeafReaderContext context : origReader.leaves()) {
+
+ LeafReader reader = context.reader();
+
+ SortedSetDocValues segValues = reader.getSortedSetDocValues(field);
+ if (segValues == null) {
+ continue;
+ }
+
+ Bits liveDocs = reader.getLiveDocs();
+
+ if (ordinalMap != null) {
+ final LongValues ordMap = ordinalMap.getGlobalOrds(context.ord);
+
+ int numSegOrds = (int) segValues.getValueCount();
+
+ // First count in seg-ord space:
+ final int[] segCounts = new int[numSegOrds];
+ int docID;
+ while ((docID = segValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+ if (liveDocs == null || liveDocs.get(docID)) {
+ int term = (int) segValues.nextOrd();
+ while (term != SortedSetDocValues.NO_MORE_ORDS) {
+ segCounts[term]++;
+ term = (int) segValues.nextOrd();
+ }
+ }
+ }
+
+ // Then, migrate to global ords:
+ for(int ord=0;ord<numSegOrds;ord++) {
+ int count = segCounts[ord];
+ if (count != 0) {
+ counts[(int) ordMap.get(ord)] += count;
+ }
+ }
+ } else {
+ // No ord mapping (e.g., single segment index):
+ // just aggregate directly into counts:
+ int docID;
+ while ((docID = segValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+ if (liveDocs == null || liveDocs.get(docID)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
counts[term]++;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
index ef96073..d560d40 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
@@ -24,8 +24,12 @@ import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
/** Computes facets counts, assuming the default encoding
@@ -50,6 +54,16 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
count(fc.getMatchingDocs());
}
+ /** Create {@code FastTaxonomyFacetCounts}, using the
+ * specified {@code indexFieldName} for ordinals, and
+ * counting all non-deleted documents in the index. This is
+ * the same result as searching on {@link MatchAllDocsQuery},
+ * but faster */
+ public FastTaxonomyFacetCounts(String indexFieldName, IndexReader reader, TaxonomyReader taxoReader, FacetsConfig config) throws IOException {
+ super(indexFieldName, taxoReader, config);
+ countAll(reader);
+ }
+
private final void count(List<MatchingDocs> matchingDocs) throws IOException {
for(MatchingDocs hits : matchingDocs) {
BinaryDocValues dv = hits.context.reader().getBinaryDocValues(indexFieldName);
@@ -82,4 +96,39 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
rollup();
}
+
+ private final void countAll(IndexReader reader) throws IOException {
+ for(LeafReaderContext context : reader.leaves()) {
+ BinaryDocValues dv = context.reader().getBinaryDocValues(indexFieldName);
+ if (dv == null) { // this reader does not have DocValues for the requested category list
+ continue;
+ }
+
+ Bits liveDocs = context.reader().getLiveDocs();
+
+ for (int doc = dv.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dv.nextDoc()) {
+ if (liveDocs != null && liveDocs.get(doc) == false) {
+ continue;
+ }
+ final BytesRef bytesRef = dv.binaryValue();
+ byte[] bytes = bytesRef.bytes;
+ int end = bytesRef.offset + bytesRef.length;
+ int ord = 0;
+ int offset = bytesRef.offset;
+ int prev = 0;
+ while (offset < end) {
+ byte b = bytes[offset++];
+ if (b >= 0) {
+ prev = ord = ((ord << 7) | b) + prev;
+ ++values[ord];
+ ord = 0;
+ } else {
+ ord = (ord << 7) | (b & 0x7F);
+ }
+ }
+ }
+ }
+
+ rollup();
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java
index d111b44..e1903d1 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java
@@ -76,7 +76,7 @@ public abstract class TaxonomyFacets extends Facets {
protected FacetsConfig.DimConfig verifyDim(String dim) {
FacetsConfig.DimConfig dimConfig = config.getDimConfig(dim);
if (!dimConfig.indexFieldName.equals(indexFieldName)) {
- throw new IllegalArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + indexFieldName);
+ throw new IllegalArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + indexFieldName + "\"");
}
return dimConfig;
}
@@ -102,4 +102,4 @@ public abstract class TaxonomyFacets extends Facets {
return results;
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
index 60beddd..5aed22b 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
@@ -16,6 +16,7 @@
*/
package org.apache.lucene.facet.sortedset;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -72,12 +73,8 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
-
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
-
- SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
+ SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
assertEquals("dim=a path=[] value=4 childCount=3\n foo (2)\n bar (1)\n zoo (1)\n", facets.getTopChildren(10, "a").toString());
assertEquals("dim=b path=[] value=1 childCount=1\n baz (1)\n", facets.getTopChildren(10, "b").toString());
@@ -171,9 +168,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
- SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
+ SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
// Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10);
@@ -215,9 +210,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
- SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
+ SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
// Ask for top 10 labels for any dims that have counts:
assertEquals("dim=a path=[] value=2 childCount=2\n foo1 (1)\n foo2 (1)\n", facets.getTopChildren(10, "a").toString());
@@ -312,4 +305,14 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
w.close();
IOUtils.close(searcher.getIndexReader(), indexDir, taxoDir);
}
+
+ private static SortedSetDocValuesFacetCounts getAllFacets(IndexSearcher searcher, SortedSetDocValuesReaderState state) throws IOException {
+ if (random().nextBoolean()) {
+ FacetsCollector c = new FacetsCollector();
+ searcher.search(new MatchAllDocsQuery(), c);
+ return new SortedSetDocValuesFacetCounts(state, c);
+ } else {
+ return new SortedSetDocValuesFacetCounts(state);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0782b095/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
index 20bfdb5..3bb480d 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
@@ -17,6 +17,7 @@
package org.apache.lucene.facet.taxonomy;
import java.io.ByteArrayOutputStream;
+import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
@@ -102,16 +103,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- // Aggregate the facet counts:
- FacetsCollector c = new FacetsCollector();
-
- // MatchAllDocsQuery is for "browsing" (counts facets
- // for all non-deleted docs in the index); normally
- // you'd use a "normal" query, and use MultiCollector to
- // wrap collecting the "normal" hits and also facets:
- searcher.search(new MatchAllDocsQuery(), c);
-
- Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
// Retrieve & verify results:
assertEquals("dim=Publish Date path=[] value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", facets.getTopChildren(10, "Publish Date").toString());
@@ -120,7 +112,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// Now user drills down on Publish Date/2010:
DrillDownQuery q2 = new DrillDownQuery(config);
q2.add("Publish Date", "2010");
- c = new FacetsCollector();
+ FacetsCollector c = new FacetsCollector();
searcher.search(q2, c);
facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
assertEquals("dim=Author path=[] value=2 childCount=2\n Bob (1)\n Lisa (1)\n", facets.getTopChildren(10, "Author").toString());
@@ -185,11 +177,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
-
- Facets facets = getTaxonomyFacetCounts(taxoReader, new FacetsConfig(), c);
-
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
+
// Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10);
@@ -225,7 +214,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
+ searcher.search(new MatchAllDocsQuery(), c);
// Uses default $facets field:
Facets facets;
@@ -301,15 +290,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- // Aggregate the facet counts:
- FacetsCollector c = new FacetsCollector();
-
- // MatchAllDocsQuery is for "browsing" (counts facets
- // for all non-deleted docs in the index); normally
- // you'd use a "normal" query, and use MultiCollector to
- // wrap collecting the "normal" hits and also facets:
- searcher.search(new MatchAllDocsQuery(), c);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
expectThrows(IllegalArgumentException.class, () -> {
facets.getSpecificValue("a");
@@ -344,10 +325,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
assertEquals(1, facets.getSpecificValue("dim", "test\u001Fone"));
assertEquals(1, facets.getSpecificValue("dim", "test\u001Etwo"));
@@ -387,11 +366,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
-
- FacetsCollector c = new FacetsCollector();
- searcher.search(new MatchAllDocsQuery(), c);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
assertEquals(1, facets.getTopChildren(10, "dim").value);
assertEquals(1, facets.getTopChildren(10, "dim2").value);
assertEquals(1, facets.getTopChildren(10, "dim3").value);
@@ -432,15 +408,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- // Aggregate the facet counts:
- FacetsCollector c = new FacetsCollector();
-
- // MatchAllDocsQuery is for "browsing" (counts facets
- // for all non-deleted docs in the index); normally
- // you'd use a "normal" query, and use MultiCollector to
- // wrap collecting the "normal" hits and also facets:
- searcher.search(new MatchAllDocsQuery(), c);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
assertEquals(numLabels, result.labelValues.length);
@@ -544,9 +512,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- FacetsCollector sfc = new FacetsCollector();
- newSearcher(r).search(new MatchAllDocsQuery(), sfc);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
+
for (FacetResult result : facets.getAllDims(10)) {
assertEquals(r.numDocs(), result.value.intValue());
}
@@ -572,10 +539,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- final FacetsCollector sfc = new FacetsCollector();
- newSearcher(r).search(new MatchAllDocsQuery(), sfc);
-
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
+
List<FacetResult> res1 = facets.getAllDims(10);
List<FacetResult> res2 = facets.getAllDims(10);
assertEquals("calling getFacetResults twice should return the .equals()=true result", res1, res2);
@@ -601,9 +566,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
- FacetsCollector sfc = new FacetsCollector();
- newSearcher(r).search(new MatchAllDocsQuery(), sfc);
- Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
+ Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
assertEquals(10, facets.getTopChildren(2, "a").childCount);
@@ -754,4 +717,21 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
w.close();
IOUtils.close(tw, searcher.getIndexReader(), tr, indexDir, taxoDir);
}
+
+ private static Facets getAllFacets(String indexFieldName, IndexSearcher searcher, TaxonomyReader taxoReader, FacetsConfig config) throws IOException {
+ if (random().nextBoolean()) {
+ // Aggregate the facet counts:
+ FacetsCollector c = new FacetsCollector();
+
+ // MatchAllDocsQuery is for "browsing" (counts facets
+ // for all non-deleted docs in the index); normally
+ // you'd use a "normal" query, and use MultiCollector to
+ // wrap collecting the "normal" hits and also facets:
+ searcher.search(new MatchAllDocsQuery(), c);
+
+ return new FastTaxonomyFacetCounts(taxoReader, config, c);
+ } else {
+ return new FastTaxonomyFacetCounts(indexFieldName, searcher.getIndexReader(), taxoReader, config);
+ }
+ }
}
[37/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-4164: fix
group.limit=-1 in distributed mode
Posted by kr...@apache.org.
SOLR-4164: fix group.limit=-1 in distributed mode
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7794fbd1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7794fbd1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7794fbd1
Branch: refs/heads/jira/solr-8593
Commit: 7794fbd13f1a0edfff8f121fb1c6a01075eeef6a
Parents: 24446f5
Author: yonik <yo...@apache.org>
Authored: Wed Oct 26 11:56:45 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Wed Oct 26 12:56:51 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../solr/handler/component/QueryComponent.java | 7 ++-
.../java/org/apache/solr/search/Grouping.java | 4 +-
.../TopGroupsShardResponseProcessor.java | 9 ++-
.../apache/solr/TestDistributedGrouping.java | 59 ++++++++++----------
5 files changed, 48 insertions(+), 34 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7794fbd1/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index efacec4..5d0af5c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -227,6 +227,9 @@ Bug Fixes
* SOLR-2039: Multivalued fields with dynamic names does not work properly with DIH.
(K A, ruslan.shv, Cao Manh Dat via shalin)
+
+* SOLR-4164: group.limit=-1 was not supported for grouping in distributed mode.
+ (Cao Manh Dat, Lance Norskog, Webster Homer, hossman, yonik)
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7794fbd1/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index bc80629..09fc74b 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -414,6 +414,9 @@ public class QueryComponent extends SearchComponent
.setTruncateGroups(groupingSpec.isTruncateGroups() && groupingSpec.getFields().length > 0)
.setSearcher(searcher);
+ int docsToCollect = Grouping.getMax(groupingSpec.getGroupOffset(), groupingSpec.getGroupLimit(), searcher.maxDoc());
+ docsToCollect = Math.max(docsToCollect, 1);
+
for (String field : groupingSpec.getFields()) {
SchemaField schemaField = schema.getField(field);
String[] topGroupsParam = params.getParams(GroupParams.GROUP_DISTRIBUTED_TOPGROUPS_PREFIX + field);
@@ -436,7 +439,7 @@ public class QueryComponent extends SearchComponent
.setGroupSort(groupingSpec.getGroupSort())
.setSortWithinGroup(groupingSpec.getSortWithinGroup())
.setFirstPhaseGroups(topGroups)
- .setMaxDocPerGroup(groupingSpec.getGroupOffset() + groupingSpec.getGroupLimit())
+ .setMaxDocPerGroup(docsToCollect)
.setNeedScores(needScores)
.setNeedMaxScore(needScores)
.build()
@@ -445,7 +448,7 @@ public class QueryComponent extends SearchComponent
for (String query : groupingSpec.getQueries()) {
secondPhaseBuilder.addCommandField(new Builder()
- .setDocsToCollect(groupingSpec.getOffset() + groupingSpec.getLimit())
+ .setDocsToCollect(docsToCollect)
.setSort(groupingSpec.getGroupSort())
.setQuery(query, rb.req)
.setDocSet(searcher)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7794fbd1/solr/core/src/java/org/apache/solr/search/Grouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 80a6aeb..8d6f3ca 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -459,10 +459,10 @@ public class Grouping {
*
* @param offset The offset
* @param len The number of documents to return
- * @param max The number of document to return if len < 0 or if offset + len < 0
+ * @param max The number of document to return if len < 0 or if offset + len > 0
* @return offset + len if len equals zero or higher. Otherwise returns max
*/
- int getMax(int offset, int len, int max) {
+ public static int getMax(int offset, int len, int max) {
int v = len < 0 ? max : offset + len;
if (v < 0 || v > max) v = max;
return v;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7794fbd1/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
index d0a06c5..688a6c3 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
@@ -162,7 +162,14 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
}
TopGroups<BytesRef>[] topGroupsArr = new TopGroups[topGroups.size()];
- rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, sortWithinGroup, groupOffsetDefault, docsPerGroupDefault, TopGroups.ScoreMergeMode.None));
+ int docsPerGroup = docsPerGroupDefault;
+ if (docsPerGroup < 0) {
+ docsPerGroup = 0;
+ for (TopGroups subTopGroups : topGroups) {
+ docsPerGroup += subTopGroups.totalGroupedHitCount;
+ }
+ }
+ rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, sortWithinGroup, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
}
for (String query : commandTopDocs.keySet()) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7794fbd1/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
index af42ff4..ad62fcc 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
@@ -58,12 +58,12 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
handle.put("grouped", UNORDERED); // distrib grouping doesn't guarantee order of top level group commands
// Test distributed grouping with empty indices
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "hl","true","hl.fl",t1);
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", i1);
- query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH");
- query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1);
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "hl","true","hl.fl",t1);
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", i1);
+ query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH");
+ query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1);
indexr(id,1, i1, 100, tlong, 100, i1dv, 100, t1,"now is the time for all good men",
tdate_a, "2010-04-20T11:00:00Z",
@@ -154,23 +154,24 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
// test grouping
// The second sort = id asc . The sorting behaviour is different in dist mode. See TopDocs#merge
// The shard the result came from matters in the order if both document sortvalues are equal
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", "id asc, _docid_ asc");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", "{!func}add(" + i1 + ",5) asc, id asc");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", tlong);
- query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "facet", "true", "hl","true","hl.fl",t1);
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.sort", "id desc");
-
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc");
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 0, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "id asc, _docid_ asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "{!func}add(" + i1 + ",5) asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", tlong);
+ query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "hl","true","hl.fl",t1);
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.sort", "id desc");
+
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", -1, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "group.offset", 5, "group.limit", -1, "sort", i1 + " asc, id asc");
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "sort", i1 + " asc, id asc", "group.format", "simple");
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "sort", i1 + " asc, id asc", "group.main", "true");
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc", "group.format", "simple", "offset", 5, "rows", 5);
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc", "group.main", "true", "offset", 5, "rows", 5);
- query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc");
+ query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", -1, "sort", i1 + " asc, id asc");
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc");
query("q", "*:*", "fl", "id," + i1dv, "group", "true", "group.field", i1dv, "group.limit", 10, "sort", i1 + " asc, id asc");
@@ -180,7 +181,7 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true",
"group.query", t1 + ":kings OR " + t1 + ":eggs",
"group.query", "id:5", // single doc, so only one shard will have it
- "group.limit", 10, "sort", i1 + " asc, id asc");
+ "group.limit", -1, "sort", i1 + " asc, id asc");
handle.put(t1 + ":this_will_never_match", SKIP); // :TODO: SOLR-4181
query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true",
"group.query", t1 + ":kings OR " + t1 + ":eggs",
@@ -220,8 +221,8 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
}
// SOLR-3316
- query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
- query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1);
+ query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1);
+ query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1);
// SOLR-3436
query("q", "*:*", "fq", s1 + ":a", "fl", "id," + i1, "group", "true", "group.field", i1, "sort", i1 + " asc, id asc", "group.ngroups", "true");
@@ -241,7 +242,7 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
}
ModifiableSolrParams params = new ModifiableSolrParams();
- Object[] q = {"q", "*:*", "fq", s1 + ":a", "rows", 1, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "group.ngroups", "true"};
+ Object[] q = {"q", "*:*", "fq", s1 + ":a", "rows", 1, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "group.ngroups", "true"};
for (int i = 0; i < q.length; i += 2) {
params.add(q[i].toString(), q[i + 1].toString());
@@ -263,25 +264,25 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
// We validate distributed grouping with scoring as first sort.
// note: this 'q' matches all docs and returns the 'id' as the score, which is unique and so our results should be deterministic.
handle.put("maxScore", SKIP);// TODO see SOLR-6612
- query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " desc", "group.sort", "score desc"); // SOLR-2955
- query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", "score desc, _docid_ asc, id asc");
- query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", 10);
+ query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " desc", "group.sort", "score desc"); // SOLR-2955
+ query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "score desc, _docid_ asc, id asc");
+ query("q", "{!func}id", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1);
// some explicit checks of non default sorting, and sort/group.sort with diff clauses
query("q", "{!func}id", "rows", 100, "fl", tlong + ",id," + i1, "group", "true",
- "group.field", i1, "group.limit", 10,
+ "group.field", i1, "group.limit", -1,
"sort", tlong+" asc, id desc");
query("q", "{!func}id", "rows", 100, "fl", tlong + ",id," + i1, "group", "true",
- "group.field", i1, "group.limit", 10,
+ "group.field", i1, "group.limit", -1,
"sort", "id asc",
"group.sort", tlong+" asc, id desc");
query("q", "{!func}id", "rows", 100, "fl", tlong + ",id," + i1, "group", "true",
- "group.field", i1, "group.limit", 10,
+ "group.field", i1, "group.limit", -1,
"sort", tlong+" asc, id desc",
"group.sort", "id asc");
rsp = query("q", "{!func}id", "fq", oddField+":[* TO *]",
"rows", 100, "fl", tlong + ",id," + i1, "group", "true",
- "group.field", i1, "group.limit", 10,
+ "group.field", i1, "group.limit", -1,
"sort", tlong+" asc",
"group.sort", oddField+" asc");
nl = (NamedList<?>) rsp.getResponse().get("grouped");
[12/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9657: Fixed Javadocs
and added example
Posted by kr...@apache.org.
SOLR-9657: Fixed Javadocs and added example
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9d692cde
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9d692cde
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9d692cde
Branch: refs/heads/jira/solr-8593
Commit: 9d692cde53c25230d6db2663816f313cf356535b
Parents: 4a85163
Author: Alexandre Rafalovitch <ar...@apache.org>
Authored: Mon Oct 24 18:16:38 2016 -0400
Committer: Alexandre Rafalovitch <ar...@apache.org>
Committed: Mon Oct 24 18:16:38 2016 -0400
----------------------------------------------------------------------
.../update/processor/TemplateUpdateProcessorFactory.java | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d692cde/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
index b791d3b..c16a0c7 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
@@ -31,8 +31,14 @@ import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.util.ConcurrentLRUCache;
-//Adds new fields to documents based on a template pattern specified via Template.field
-// request parameters (multi-valued) or 'field' value specified in initArgs
+/**
+* Adds new fields to documents based on a template pattern specified via Template.field
+* request parameters (multi-valued) or 'field' value specified in initArgs.
+* <p>
+* The format of the parameter is <field-name>:<the-template-string>, for example: <br>
+* <b>Template.field=fname:${somefield}some_string${someotherfield}</b>
+*
+*/
public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory {
private Cache<String, Resolved> templateCache = new ConcurrentLRUCache<>(1000, 800, 900, 10, false, false, null);
@Override
[13/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9654: tests: specify
descending count sort for streaming
Posted by kr...@apache.org.
SOLR-9654: tests: specify descending count sort for streaming
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9132ac6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9132ac6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9132ac6
Branch: refs/heads/jira/solr-8593
Commit: c9132ac66100ab46bea480397396105f8489b239
Parents: 9d692cd
Author: yonik <yo...@apache.org>
Authored: Mon Oct 24 21:18:51 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Mon Oct 24 21:19:06 2016 -0400
----------------------------------------------------------------------
.../apache/solr/search/facet/TestJsonFacets.java | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9132ac6/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 0ec0be4..1c1a343 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1151,10 +1151,10 @@ public class TestJsonFacets extends SolrTestCaseHS {
if (!client.local()) {
client.testJQ(params(p, "q", "*:*"
, "json.facet", "{" +
- "cat0:{type:terms, field:${cat_s}, limit:1, overrequest:0}" +
- ",cat1:{type:terms, field:${cat_s}, limit:1, overrequest:1}" +
- ",catDef:{type:terms, field:${cat_s}, limit:1, overrequest:-1}" + // -1 is default overrequest
- ",catBig:{type:terms, field:${cat_s}, offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
+ ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
"}"
)
, "facets=={ count:6" +
@@ -1168,10 +1168,10 @@ public class TestJsonFacets extends SolrTestCaseHS {
// In non-distrib mode, should still be able to specify overrequest, but it shouldn't matter.
client.testJQ(params(p, "q", "*:*"
, "json.facet", "{" +
- "cat0:{type:terms, field:${cat_s}, limit:1, overrequest:0}" +
- ",cat1:{type:terms, field:${cat_s}, limit:1, overrequest:1}" +
- ",catDef:{type:terms, field:${cat_s}, limit:1, overrequest:-1}" + // -1 is default overrequest
- ",catBig:{type:terms, field:${cat_s}, offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
+ ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
"}"
)
, "facets=={ count:6" +
[03/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7462: Give doc
values APIs an `advanceExact` method.
Posted by kr...@apache.org.
LUCENE-7462: Give doc values APIs an `advanceExact` method.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9aca4c9d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9aca4c9d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9aca4c9d
Branch: refs/heads/jira/solr-8593
Commit: 9aca4c9d56089a9ac89df5fd93be76a4fe822448
Parents: 9b49c72
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Oct 20 14:07:10 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 24 10:51:23 2016 +0200
----------------------------------------------------------------------
.../codecs/lucene53/Lucene53NormsProducer.java | 6 +
.../lucene54/Lucene54DocValuesProducer.java | 65 +++-
.../lucene54/TestLucene54DocValuesFormat.java | 5 +-
.../simpletext/SimpleTextDocValuesReader.java | 95 +++++-
.../simpletext/SimpleTextDocValuesWriter.java | 9 +
.../apache/lucene/codecs/DocValuesConsumer.java | 25 ++
.../org/apache/lucene/codecs/NormsConsumer.java | 5 +
.../lucene/codecs/lucene70/IndexedDISI.java | 88 +++--
.../lucene70/Lucene70DocValuesProducer.java | 62 ++++
.../codecs/lucene70/Lucene70NormsProducer.java | 11 +
.../apache/lucene/index/BinaryDocValues.java | 5 +-
.../lucene/index/BinaryDocValuesWriter.java | 5 +
.../org/apache/lucene/index/CheckIndex.java | 81 ++++-
.../java/org/apache/lucene/index/DocValues.java | 95 +++---
.../apache/lucene/index/DocValuesIterator.java | 33 ++
.../lucene/index/FilterBinaryDocValues.java | 5 +
.../lucene/index/FilterNumericDocValues.java | 5 +
.../index/LegacyBinaryDocValuesWrapper.java | 8 +
.../index/LegacyNumericDocValuesWrapper.java | 9 +
.../index/LegacySortedDocValuesWrapper.java | 9 +
.../LegacySortedNumericDocValuesWrapper.java | 9 +
.../index/LegacySortedSetDocValuesWrapper.java | 10 +
.../org/apache/lucene/index/MultiDocValues.java | 125 +++++++
.../apache/lucene/index/NormValuesWriter.java | 5 +
.../apache/lucene/index/NumericDocValues.java | 7 +-
.../lucene/index/NumericDocValuesWriter.java | 5 +
.../apache/lucene/index/ReadersAndUpdates.java | 10 +
.../index/SingletonSortedNumericDocValues.java | 24 +-
.../index/SingletonSortedSetDocValues.java | 18 +-
.../apache/lucene/index/SortedDocValues.java | 3 +
.../lucene/index/SortedDocValuesWriter.java | 5 +
.../lucene/index/SortedNumericDocValues.java | 6 +-
.../index/SortedNumericDocValuesWriter.java | 5 +
.../apache/lucene/index/SortedSetDocValues.java | 5 +-
.../lucene/index/SortedSetDocValuesWriter.java | 5 +
.../apache/lucene/index/SortingLeafReader.java | 32 ++
.../apache/lucene/search/FieldComparator.java | 40 +--
.../lucene/search/SortedNumericSelector.java | 18 +
.../apache/lucene/search/SortedSetSelector.java | 36 ++
.../search/similarities/BM25Similarity.java | 8 +-
.../search/similarities/SimilarityBase.java | 6 +-
.../search/similarities/TFIDFSimilarity.java | 8 +-
.../lucene/codecs/lucene70/TestIndexedDISI.java | 28 +-
.../lucene70/TestLucene70DocValuesFormat.java | 4 +-
.../SortedSetDocValuesFacetCounts.java | 15 +-
.../lucene/search/join/BlockJoinSelector.java | 104 +++++-
.../search/join/GenericTermsCollector.java | 7 +
.../search/join/TestBlockJoinSelector.java | 12 +
.../apache/lucene/index/memory/MemoryIndex.java | 6 +
.../search/TestDiversifiedTopDocsCollector.java | 9 +
.../lucene/index/AssertingLeafReader.java | 89 ++++-
.../index/BaseDocValuesFormatTestCase.java | 331 ++++++++++++-------
.../index/BaseIndexFileFormatTestCase.java | 12 +
.../lucene/index/BaseNormsFormatTestCase.java | 101 +++---
.../apache/solr/request/DocValuesFacets.java | 20 +-
.../request/PerSegmentSingleValuedFaceting.java | 10 +-
.../apache/solr/search/SolrIndexSearcher.java | 6 +-
.../facet/FacetFieldProcessorByArrayDV.java | 30 +-
.../apache/solr/uninverting/FieldCacheImpl.java | 18 +
59 files changed, 1405 insertions(+), 413 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
index a97cb5a..718fcd6 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
@@ -222,6 +222,12 @@ class Lucene53NormsProducer extends NormsProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return true;
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
index 1f785fe..f1c169c 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
@@ -477,6 +477,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return true;
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
@@ -524,6 +530,13 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ value = values.get(doc);
+ return value != 0 || docsWithField.get(doc);
+ }
+
+ @Override
public long cost() {
return maxDoc;
}
@@ -696,6 +709,16 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (advance(target) == target) {
+ return true;
+ }
+ --index;
+ doc = target;
+ return false;
+ }
+
+ @Override
public long longValue() {
assert index >= 0;
assert index < docIDsLength;
@@ -891,6 +914,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return sparseValues.advanceExact(target);
+ }
+
+ @Override
public long cost() {
return sparseValues.cost();
}
@@ -933,7 +961,14 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return nextDoc();
}
}
-
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ ord = (int) ordinals.get(target);
+ return ord != -1;
+ }
+
@Override
public int ordValue() {
return ord;
@@ -1017,6 +1052,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return sparseValues.advanceExact(target);
+ }
+
+ @Override
public long cost() {
return sparseValues.cost();
}
@@ -1061,6 +1101,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docsWithField.get(docID);
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
@@ -1122,6 +1168,14 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ startOffset = ordIndex.get(docID);
+ endOffset = ordIndex.get(docID+1L);
+ return endOffset > startOffset;
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
@@ -1185,6 +1239,15 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ int ord = (int) ordinals.get(docID);
+ startOffset = offsets[ord];
+ endOffset = offsets[ord+1];
+ return endOffset > startOffset;
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
index c6ca201..b231716 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
@@ -106,7 +106,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestSortedVsStoredFields(atLeast(300), 1, 32766);
+ doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
}
}
@@ -114,7 +114,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthManyVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
+ doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
}
}
@@ -201,6 +201,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
}
final IndexReader indexReader = writer.getReader();
+ TestUtil.checkReader(indexReader);
writer.close();
for (LeafReaderContext context : indexReader.leaves()) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
index adf5e42..09f97ab 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
@@ -144,7 +144,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
if (values == null) {
return null;
} else {
- DocIdSetIterator docsWithField = getNumericDocsWithField(fieldInfo);
+ DocValuesIterator docsWithField = getNumericDocsWithField(fieldInfo);
return new NumericDocValues() {
@Override
@@ -168,6 +168,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return docsWithField.advanceExact(target);
+ }
+
+ @Override
public long longValue() throws IOException {
return values.apply(docsWithField.docID());
}
@@ -214,12 +219,16 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
};
}
-
- private DocIdSetIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
+
+ private static abstract class DocValuesIterator extends DocIdSetIterator {
+ abstract boolean advanceExact(int target) throws IOException;
+ }
+
+ private DocValuesIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
final OneField field = fields.get(fieldInfo.name);
final IndexInput in = data.clone();
final BytesRefBuilder scratch = new BytesRefBuilder();
- return new DocIdSetIterator() {
+ return new DocValuesIterator() {
int doc = -1;
@@ -250,6 +259,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
return doc = NO_MORE_DOCS;
}
+
+ @Override
+ boolean advanceExact(int target) throws IOException {
+ this.doc = target;
+ in.seek(field.dataStartFilePointer + (1+field.pattern.length()+2)*target);
+ SimpleTextUtil.readLine(in, scratch); // data
+ SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
+ return scratch.byteAt(0) == (byte) 'T';
+ }
};
}
@@ -265,7 +283,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
final BytesRefBuilder scratch = new BytesRefBuilder();
final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
- DocIdSetIterator docsWithField = getBinaryDocsWithField(fieldInfo);
+ DocValuesIterator docsWithField = getBinaryDocsWithField(fieldInfo);
IntFunction<BytesRef> values = new IntFunction<BytesRef>() {
final BytesRefBuilder term = new BytesRefBuilder();
@@ -317,19 +335,24 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return docsWithField.advanceExact(target);
+ }
+
+ @Override
public BytesRef binaryValue() throws IOException {
return values.apply(docsWithField.docID());
}
};
}
- private DocIdSetIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
+ private DocValuesIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
final OneField field = fields.get(fieldInfo.name);
final IndexInput in = data.clone();
final BytesRefBuilder scratch = new BytesRefBuilder();
final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
- return new DocIdSetIterator() {
+ return new DocValuesIterator() {
int doc = -1;
@@ -371,6 +394,26 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
return doc = NO_MORE_DOCS;
}
+
+ @Override
+ boolean advanceExact(int target) throws IOException {
+ this.doc = target;
+ in.seek(field.dataStartFilePointer + (9+field.pattern.length() + field.maxLength+2)*target);
+ SimpleTextUtil.readLine(in, scratch);
+ assert StringHelper.startsWith(scratch.get(), LENGTH);
+ int len;
+ try {
+ len = decoder.parse(new String(scratch.bytes(), LENGTH.length, scratch.length() - LENGTH.length, StandardCharsets.UTF_8)).intValue();
+ } catch (ParseException pe) {
+ throw new CorruptIndexException("failed to parse int length", in, pe);
+ }
+ // skip past bytes
+ byte bytes[] = new byte[len];
+ in.readBytes(bytes, 0, len);
+ SimpleTextUtil.readLine(in, scratch); // newline
+ SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
+ return scratch.byteAt(0) == (byte) 'T';
+ }
};
}
@@ -424,7 +467,20 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
return doc = NO_MORE_DOCS;
}
-
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ this.doc = target;
+ in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
+ SimpleTextUtil.readLine(in, scratch);
+ try {
+ ord = (int) ordDecoder.parse(scratch.get().utf8ToString()).longValue()-1;
+ } catch (ParseException pe) {
+ throw new CorruptIndexException("failed to parse ord", in, pe);
+ }
+ return ord >= 0;
+ }
+
@Override
public int ordValue() {
return ord;
@@ -488,6 +544,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return doc;
}
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ if (binary.advanceExact(target)) {
+ setCurrentDoc();
+ return true;
+ }
+ return false;
+ }
+
long values[];
int index;
@@ -570,6 +635,20 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
+ SimpleTextUtil.readLine(in, scratch);
+ String ordList = scratch.get().utf8ToString().trim();
+ doc = target;
+ if (ordList.isEmpty() == false) {
+ currentOrds = ordList.split(",");
+ currentIndex = 0;
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long nextOrd() throws IOException {
if (currentIndex == currentOrds.length) {
return NO_MORE_ORDS;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
index 2649ae6..8c6bdde 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
@@ -342,6 +342,15 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
return doc;
}
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ if (values.advanceExact(target)) {
+ setCurrentDoc();
+ return true;
+ }
+ return false;
+ }
+
final StringBuilder builder = new StringBuilder();
BytesRef binaryValue;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
index e04d5b9..e61724f 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
@@ -228,6 +228,11 @@ public abstract class DocValuesConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return finalCost;
}
@@ -320,6 +325,11 @@ public abstract class DocValuesConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return finalCost;
}
@@ -417,6 +427,11 @@ public abstract class DocValuesConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public int docValueCount() {
return currentSub.values.docValueCount();
}
@@ -575,6 +590,11 @@ public abstract class DocValuesConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return finalCost;
}
@@ -732,6 +752,11 @@ public abstract class DocValuesConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long nextOrd() throws IOException {
long subOrd = currentSub.values.nextOrd();
if (subOrd == NO_MORE_ORDS) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
index 3a6ce22..51abb69 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
@@ -158,6 +158,11 @@ public abstract class NormsConsumer implements Closeable {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return 0;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
index 3ea3141..24eaf7a 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
@@ -112,6 +112,9 @@ final class IndexedDISI extends DocIdSetIterator {
private int doc = -1;
private int index = -1;
+ // SPARSE variables
+ boolean exists;
+
// DENSE variables
private long word;
private int wordIndex = -1;
@@ -129,7 +132,7 @@ final class IndexedDISI extends DocIdSetIterator {
@Override
public int advance(int target) throws IOException {
final int targetBlock = target & 0xFFFF0000;
- if (block != targetBlock) {
+ if (block < targetBlock) {
advanceBlock(targetBlock);
}
if (block == targetBlock) {
@@ -138,7 +141,19 @@ final class IndexedDISI extends DocIdSetIterator {
}
readBlockHeader();
}
- return doc = method.readFirstDoc(this);
+ boolean found = method.advanceWithinBlock(this, block);
+ assert found;
+ return doc;
+ }
+
+ public boolean advanceExact(int target) throws IOException {
+ final int targetBlock = target & 0xFFFF0000;
+ if (block < targetBlock) {
+ advanceBlock(targetBlock);
+ }
+ boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target);
+ this.doc = target;
+ return found;
}
private void advanceBlock(int targetBlock) throws IOException {
@@ -186,11 +201,6 @@ final class IndexedDISI extends DocIdSetIterator {
enum Method {
SPARSE {
@Override
- int readFirstDoc(IndexedDISI disi) throws IOException {
- disi.index++;
- return disi.block | Short.toUnsignedInt(disi.slice.readShort());
- }
- @Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
// TODO: binary search
@@ -199,23 +209,37 @@ final class IndexedDISI extends DocIdSetIterator {
disi.index++;
if (doc >= targetInBlock) {
disi.doc = disi.block | doc;
+ disi.exists = true;
return true;
}
}
return false;
}
- },
- DENSE {
@Override
- int readFirstDoc(IndexedDISI disi) throws IOException {
- do {
- disi.word = disi.slice.readLong();
- disi.wordIndex++;
- } while (disi.word == 0L);
- disi.index = disi.numberOfOnes;
- disi.numberOfOnes += Long.bitCount(disi.word);
- return disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word);
+ boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+ final int targetInBlock = target & 0xFFFF;
+ // TODO: binary search
+ if (target == disi.doc) {
+ return disi.exists;
+ }
+ for (; disi.index < disi.nextBlockIndex;) {
+ int doc = Short.toUnsignedInt(disi.slice.readShort());
+ disi.index++;
+ if (doc >= targetInBlock) {
+ if (doc != targetInBlock) {
+ disi.index--;
+ disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES);
+ break;
+ }
+ disi.exists = true;
+ return true;
+ }
+ }
+ disi.exists = false;
+ return false;
}
+ },
+ DENSE {
@Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
@@ -244,26 +268,42 @@ final class IndexedDISI extends DocIdSetIterator {
}
return false;
}
- },
- ALL {
@Override
- int readFirstDoc(IndexedDISI disi) {
- return disi.block;
+ boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+ final int targetInBlock = target & 0xFFFF;
+ final int targetWordIndex = targetInBlock >>> 6;
+ for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) {
+ disi.word = disi.slice.readLong();
+ disi.numberOfOnes += Long.bitCount(disi.word);
+ }
+ disi.wordIndex = targetWordIndex;
+
+ long leftBits = disi.word >>> target;
+ disi.index = disi.numberOfOnes - Long.bitCount(leftBits);
+ return (leftBits & 1L) != 0;
}
+ },
+ ALL {
@Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
disi.doc = target;
disi.index = target - disi.gap;
return true;
}
+ @Override
+ boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+ disi.index = target - disi.gap;
+ return true;
+ }
};
- /** Read the first document of the current block. */
- abstract int readFirstDoc(IndexedDISI disi) throws IOException;
-
/** Advance to the first doc from the block that is equal to or greater than {@code target}.
* Return true if there is such a doc and false otherwise. */
abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException;
+
+ /** Advance the iterator exactly to the position corresponding to the given {@code target}
+ * and return whether this document exists. */
+ abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
index 637c8ee..19815ba 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
@@ -375,6 +375,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) {
+ doc = target;
+ return true;
+ }
+
+ @Override
public long cost() {
return maxDoc;
}
@@ -392,6 +398,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+
+ @Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@@ -521,6 +532,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return true;
+ }
+
+ @Override
public BytesRef binaryValue() throws IOException {
return bytesRefs.get(doc);
}
@@ -551,6 +568,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+
+ @Override
public BytesRef binaryValue() throws IOException {
return bytesRefs.get(disi.index());
}
@@ -616,6 +638,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) {
+ doc = target;
+ return true;
+ }
+
+ @Override
public int ordValue() {
return (int) ords.get(doc);
}
@@ -646,6 +674,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+
+ @Override
public int ordValue() {
return (int) ords.get(disi.index());
}
@@ -960,6 +993,15 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ start = addresses.get(target);
+ end = addresses.get(target + 1L);
+ count = (int) (end - start);
+ doc = target;
+ return true;
+ }
+
+ @Override
public long nextValue() throws IOException {
return values.get(start++);
}
@@ -1001,6 +1043,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ set = false;
+ return disi.advanceExact(target);
+ }
+
+ @Override
public long nextValue() throws IOException {
set();
return values.get(start++);
@@ -1073,6 +1121,14 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ start = addresses.get(target);
+ end = addresses.get(target + 1L);
+ doc = target;
+ return true;
+ }
+
+ @Override
public long nextOrd() throws IOException {
if (start == end) {
return NO_MORE_ORDS;
@@ -1113,6 +1169,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ set = false;
+ return disi.advanceExact(target);
+ }
+
+ @Override
public long nextOrd() throws IOException {
if (set == false) {
final int index = disi.index();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
index e3f6f79..c97f1c3 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
@@ -160,6 +160,12 @@ final class Lucene70NormsProducer extends NormsProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ this.doc = target;
+ return true;
+ }
+
+ @Override
public long cost() {
return maxDoc;
}
@@ -177,6 +183,11 @@ final class Lucene70NormsProducer extends NormsProducer {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return disi.advanceExact(target);
+ }
+
+ @Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
index 66397e4..6d23cf1 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
@@ -19,13 +19,12 @@ package org.apache.lucene.index;
import java.io.IOException;
-import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
/**
* A per-document numeric value.
*/
-public abstract class BinaryDocValues extends DocIdSetIterator {
+public abstract class BinaryDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@@ -33,6 +32,8 @@ public abstract class BinaryDocValues extends DocIdSetIterator {
/**
* Returns the binary value for the current document ID.
+ * It is illegal to call this method after {@link #advanceExact(int)}
+ * returned {@code false}.
* @return binary value
*/
public abstract BytesRef binaryValue() throws IOException;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
index ff2e67c..9611a03 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
@@ -154,6 +154,11 @@ class BinaryDocValuesWriter extends DocValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return docsWithField.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index aec7537..f3d3562 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -2062,13 +2062,83 @@ public final class CheckIndex implements Closeable {
return status;
}
+ @FunctionalInterface
+ private static interface DocValuesIteratorSupplier {
+ DocValuesIterator get(FieldInfo fi) throws IOException;
+ }
+
+ private static void checkDVIterator(FieldInfo fi, int maxDoc, DocValuesIteratorSupplier producer) throws IOException {
+ String field = fi.name;
+
+ // Check advance
+ DocValuesIterator it1 = producer.get(fi);
+ DocValuesIterator it2 = producer.get(fi);
+ int i = 0;
+ for (int doc = it1.nextDoc(); ; doc = it1.nextDoc()) {
+
+ if (i++ % 10 == 1) {
+ int doc2 = it2.advance(doc - 1);
+ if (doc2 < doc - 1) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " went backwords (got: " + doc2 + ")");
+ }
+ if (doc2 == doc - 1) {
+ doc2 = it2.nextDoc();
+ }
+ if (doc2 != doc) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through advance() (got: " + doc2 + ")");
+ }
+ if (it2.docID() != doc) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
+ }
+ }
+
+ if (doc == NO_MORE_DOCS) {
+ break;
+ }
+ }
+
+ // Check advanceExact
+ it1 = producer.get(fi);
+ it2 = producer.get(fi);
+ i = 0;
+ int lastDoc = -1;
+ for (int doc = it1.nextDoc(); doc != NO_MORE_DOCS ; doc = it1.nextDoc()) {
+
+ if (i++ % 13 == 1) {
+ boolean found = it2.advanceExact(doc - 1);
+ if ((doc - 1 == lastDoc) != found) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " disagrees about whether document exists (got: " + found + ")");
+ }
+ if (it2.docID() != doc - 1) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " reports wrong doc ID (got: " + it2.docID() + ")");
+ }
+
+ boolean found2 = it2.advanceExact(doc - 1);
+ if (found != found2) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " has unstable advanceExact");
+ }
+
+ if (i % 1 == 0) {
+ int doc2 = it2.nextDoc();
+ if (doc != doc2) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through advance() (got: " + doc2 + ")");
+ }
+ if (it2.docID() != doc) {
+ throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
+ }
+ }
+ }
+
+ lastDoc = doc;
+ }
+ }
+
private static void checkBinaryDocValues(String fieldName, int maxDoc, BinaryDocValues bdv) throws IOException {
int doc;
if (bdv.docID() != -1) {
throw new RuntimeException("binary dv iterator for field: " + fieldName + " should start at docID=-1, but got " + bdv.docID());
}
// TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
- // TODO: check advance too
while ((doc = bdv.nextDoc()) != NO_MORE_DOCS) {
BytesRef value = bdv.binaryValue();
value.isValid();
@@ -2083,7 +2153,6 @@ public final class CheckIndex implements Closeable {
FixedBitSet seenOrds = new FixedBitSet(dv.getValueCount());
int maxOrd2 = -1;
int docID;
- // TODO: check advance too
while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
int ord = dv.ordValue();
if (ord == -1) {
@@ -2119,7 +2188,6 @@ public final class CheckIndex implements Closeable {
LongBitSet seenOrds = new LongBitSet(dv.getValueCount());
long maxOrd2 = -1;
int docID;
- // TODO: check advance too
while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
long lastOrd = -1;
long ord;
@@ -2164,7 +2232,6 @@ public final class CheckIndex implements Closeable {
if (ndv.docID() != -1) {
throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
}
- // TODO: check advance too
while (true) {
int docID = ndv.nextDoc();
if (docID == NO_MORE_DOCS) {
@@ -2191,7 +2258,6 @@ public final class CheckIndex implements Closeable {
throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
}
// TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
- // TODO: check advance too
while ((doc = ndv.nextDoc()) != NO_MORE_DOCS) {
ndv.longValue();
}
@@ -2201,23 +2267,28 @@ public final class CheckIndex implements Closeable {
switch(fi.getDocValuesType()) {
case SORTED:
status.totalSortedFields++;
+ checkDVIterator(fi, maxDoc, dvReader::getSorted);
checkBinaryDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
checkSortedDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
break;
case SORTED_NUMERIC:
status.totalSortedNumericFields++;
+ checkDVIterator(fi, maxDoc, dvReader::getSortedNumeric);
checkSortedNumericDocValues(fi.name, maxDoc, dvReader.getSortedNumeric(fi));
break;
case SORTED_SET:
status.totalSortedSetFields++;
+ checkDVIterator(fi, maxDoc, dvReader::getSortedSet);
checkSortedSetDocValues(fi.name, maxDoc, dvReader.getSortedSet(fi));
break;
case BINARY:
status.totalBinaryFields++;
+ checkDVIterator(fi, maxDoc, dvReader::getBinary);
checkBinaryDocValues(fi.name, maxDoc, dvReader.getBinary(fi));
break;
case NUMERIC:
status.totalNumericFields++;
+ checkDVIterator(fi, maxDoc, dvReader::getNumeric);
checkNumericDocValues(fi.name, dvReader.getNumeric(fi));
break;
default:
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/DocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValues.java b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
index 3377f45..b25d484 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
@@ -35,26 +35,27 @@ public final class DocValues {
*/
public static final BinaryDocValues emptyBinary() {
return new BinaryDocValues() {
- private boolean exhausted = false;
+ private int doc = -1;
@Override
public int advance(int target) {
- assert exhausted == false;
- assert target >= 0;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return true;
}
@Override
public int docID() {
- return exhausted ? NO_MORE_DOCS : -1;
+ return doc;
}
@Override
public int nextDoc() {
- assert exhausted == false;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
}
@Override
@@ -75,26 +76,27 @@ public final class DocValues {
*/
public static final NumericDocValues emptyNumeric() {
return new NumericDocValues() {
- private boolean exhausted = false;
+ private int doc = -1;
@Override
public int advance(int target) {
- assert exhausted == false;
- assert target >= 0;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return false;
}
@Override
public int docID() {
- return exhausted ? NO_MORE_DOCS : -1;
+ return doc;
}
@Override
public int nextDoc() {
- assert exhausted == false;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
}
@Override
@@ -140,26 +142,27 @@ public final class DocValues {
final BytesRef empty = new BytesRef();
return new SortedDocValues() {
- private boolean exhausted = false;
+ private int doc = -1;
@Override
public int advance(int target) {
- assert exhausted == false;
- assert target >= 0;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return false;
}
@Override
public int docID() {
- return exhausted ? NO_MORE_DOCS : -1;
+ return doc;
}
@Override
public int nextDoc() {
- assert exhausted == false;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
}
@Override
@@ -191,26 +194,27 @@ public final class DocValues {
public static final SortedNumericDocValues emptySortedNumeric(int maxDoc) {
return new SortedNumericDocValues() {
- private boolean exhausted = false;
+ private int doc = -1;
@Override
public int advance(int target) {
- assert exhausted == false;
- assert target >= 0;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return false;
}
@Override
public int docID() {
- return exhausted ? NO_MORE_DOCS : -1;
+ return doc;
}
@Override
public int nextDoc() {
- assert exhausted == false;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
}
@Override
@@ -237,26 +241,27 @@ public final class DocValues {
final BytesRef empty = new BytesRef();
return new SortedSetDocValues() {
- private boolean exhausted = false;
+ private int doc = -1;
@Override
public int advance(int target) {
- assert exhausted == false;
- assert target >= 0;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ doc = target;
+ return false;
}
@Override
public int docID() {
- return exhausted ? NO_MORE_DOCS : -1;
+ return doc;
}
@Override
public int nextDoc() {
- assert exhausted == false;
- exhausted = true;
- return NO_MORE_DOCS;
+ return doc = NO_MORE_DOCS;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
new file mode 100644
index 0000000..d53e26a
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import java.io.IOException;
+
+import org.apache.lucene.search.DocIdSetIterator;
+
+abstract class DocValuesIterator extends DocIdSetIterator {
+
+ /** Advance the iterator to exactly {@code target} and return whether
+ * {@code target} has a value.
+ * {@code target} must be greater than or equal to the current
+ * {@link #docID() doc ID} and must be a valid doc ID, i.e. ≥ 0 and
+ * < {@code maxDoc}.
+ * After this method returns, {@link #docID()} returns {@code target}. */
+ public abstract boolean advanceExact(int target) throws IOException;
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java b/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
index 66c4323..650ad04 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
@@ -50,6 +50,11 @@ public abstract class FilterBinaryDocValues extends BinaryDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return in.advanceExact(target);
+ }
+
+ @Override
public long cost() {
return in.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
index 0058fa6..bd00cf2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
@@ -48,6 +48,11 @@ public abstract class FilterNumericDocValues extends NumericDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return in.advanceExact(target);
+ }
+
+ @Override
public long cost() {
return in.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
index 13bc207..919b1ff 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
+import java.io.IOException;
+
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@@ -71,6 +73,12 @@ public final class LegacyBinaryDocValuesWrapper extends BinaryDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docsWithField.get(target);
+ }
+
+ @Override
public long cost() {
return 0;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
index a72efe8..aaccc05 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
+import java.io.IOException;
+
import org.apache.lucene.util.Bits;
/**
@@ -70,6 +72,13 @@ public final class LegacyNumericDocValuesWrapper extends NumericDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ value = values.get(docID);
+ return value != 0 || docsWithField.get(docID);
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
index d8ef2f4..ecc114b 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
+import java.io.IOException;
+
import org.apache.lucene.util.BytesRef;
/**
@@ -71,6 +73,13 @@ public final class LegacySortedDocValuesWrapper extends SortedDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ ord = values.getOrd(docID);
+ return ord != -1;
+ }
+
+ @Override
public long cost() {
return 0;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
index bf3c6cd..cfb61e3 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
+import java.io.IOException;
+
/**
* Wraps a {@link LegacySortedNumericDocValues} into a {@link SortedNumericDocValues}.
*
@@ -72,6 +74,13 @@ public final class LegacySortedNumericDocValuesWrapper extends SortedNumericDocV
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ values.setDocument(docID);
+ return values.count() != 0;
+ }
+
+ @Override
public long cost() {
return 0;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
index 45d12d2..0e96e02 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
+import java.io.IOException;
+
import org.apache.lucene.util.BytesRef;
/**
@@ -72,6 +74,14 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ values.setDocument(docID);
+ ord = values.nextOrd();
+ return ord != NO_MORE_ORDS;
+ }
+
+ @Override
public long cost() {
return 0;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
index aeb49c5..51d684d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
@@ -139,6 +139,27 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID <= docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == leaves.size()) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentLeaf = leaves.get(readerIndex);
+ currentValues = currentLeaf.reader().getNormValues(field);
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+ }
+
+ @Override
public long longValue() throws IOException {
return currentValues.longValue();
}
@@ -244,6 +265,26 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID <= docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == leaves.size()) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentLeaf = leaves.get(readerIndex);
+ currentValues = currentLeaf.reader().getNumericDocValues(field);
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+ }
+ @Override
public long longValue() throws IOException {
return currentValues.longValue();
}
@@ -348,6 +389,27 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID <= docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == leaves.size()) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentLeaf = leaves.get(readerIndex);
+ currentValues = currentLeaf.reader().getBinaryDocValues(field);
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+ }
+
+ @Override
public BytesRef binaryValue() throws IOException {
return currentValues.binaryValue();
}
@@ -462,6 +524,27 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID <= docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == leaves.size()) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentLeaf = leaves.get(readerIndex);
+ currentValues = values[readerIndex];
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+ }
+
+ @Override
public long cost() {
return finalTotalCost;
}
@@ -923,6 +1006,27 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID <= docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == values.length) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentDocStart = docStarts[readerIndex];
+ currentValues = values[readerIndex];
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentDocStart);
+ }
+
+ @Override
public int ordValue() {
return (int) mapping.getGlobalOrds(nextLeaf-1).get(currentValues.ordValue());
}
@@ -1029,6 +1133,27 @@ public class MultiDocValues {
}
@Override
+ public boolean advanceExact(int targetDocID) throws IOException {
+ if (targetDocID < docID) {
+ throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+ }
+ int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
+ if (readerIndex >= nextLeaf) {
+ if (readerIndex == values.length) {
+ throw new IllegalArgumentException("Out of range: " + targetDocID);
+ }
+ currentDocStart = docStarts[readerIndex];
+ currentValues = values[readerIndex];
+ nextLeaf = readerIndex+1;
+ }
+ docID = targetDocID;
+ if (currentValues == null) {
+ return false;
+ }
+ return currentValues.advanceExact(targetDocID - currentDocStart);
+ }
+
+ @Override
public long nextOrd() throws IOException {
long segmentOrd = currentValues.nextOrd();
if (segmentOrd == NO_MORE_ORDS) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
index 46b8c1c..b0d05e4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
@@ -133,6 +133,11 @@ class NormValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return docsWithField.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
index 5ae2e47..29b9918 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
@@ -19,12 +19,10 @@ package org.apache.lucene.index;
import java.io.IOException;
-import org.apache.lucene.search.DocIdSetIterator;
-
/**
* A per-document numeric value.
*/
-public abstract class NumericDocValues extends DocIdSetIterator {
+public abstract class NumericDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@@ -32,7 +30,10 @@ public abstract class NumericDocValues extends DocIdSetIterator {
/**
* Returns the numeric value for the current document ID.
+ * It is illegal to call this method after {@link #advanceExact(int)}
+ * returned {@code false}.
* @return numeric value
*/
public abstract long longValue() throws IOException;
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
index adfa706..24a7010 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
@@ -119,6 +119,11 @@ class NumericDocValuesWriter extends DocValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return docsWithField.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
index 894c81a..3cd465c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
@@ -360,6 +360,11 @@ class ReadersAndUpdates {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
// TODO
return 0;
@@ -462,6 +467,11 @@ class ReadersAndUpdates {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return currentValues.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
index d95f0c0..5dbdec8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
@@ -27,7 +27,6 @@ import java.io.IOException;
*/
final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
private final NumericDocValues in;
- private long value;
public SingletonSortedNumericDocValues(NumericDocValues in) {
if (in.docID() != -1) {
@@ -51,30 +50,27 @@ final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
@Override
public int nextDoc() throws IOException {
- int docID = in.nextDoc();
- if (docID != NO_MORE_DOCS) {
- value = in.longValue();
- }
- return docID;
+ return in.nextDoc();
}
@Override
public int advance(int target) throws IOException {
- int docID = in.advance(target);
- if (docID != NO_MORE_DOCS) {
- value = in.longValue();
- }
- return docID;
+ return in.advance(target);
+ }
+
+ @Override
+ public boolean advanceExact(int target) throws IOException {
+ return in.advanceExact(target);
}
-
+
@Override
public long cost() {
return in.cost();
}
@Override
- public long nextValue() {
- return value;
+ public long nextValue() throws IOException {
+ return in.longValue();
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
index cc7360e..f16cdf1 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
@@ -29,7 +29,6 @@ import org.apache.lucene.util.BytesRef;
*/
final class SingletonSortedSetDocValues extends SortedSetDocValues {
private final SortedDocValues in;
- private long currentOrd;
private long ord;
/** Creates a multi-valued view over the provided SortedDocValues */
@@ -55,8 +54,8 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
@Override
public long nextOrd() {
- long v = currentOrd;
- currentOrd = NO_MORE_ORDS;
+ long v = ord;
+ ord = NO_MORE_ORDS;
return v;
}
@@ -64,7 +63,7 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
public int nextDoc() throws IOException {
int docID = in.nextDoc();
if (docID != NO_MORE_DOCS) {
- currentOrd = ord = in.ordValue();
+ ord = in.ordValue();
}
return docID;
}
@@ -73,12 +72,21 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
public int advance(int target) throws IOException {
int docID = in.advance(target);
if (docID != NO_MORE_DOCS) {
- currentOrd = ord = in.ordValue();
+ ord = in.ordValue();
}
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ ord = in.ordValue();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public BytesRef lookupOrd(long ord) throws IOException {
// cast is ok: single-valued cannot exceed Integer.MAX_VALUE
return in.lookupOrd((int) ord);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
index 7ff084f..e2d7dfd 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
@@ -40,6 +40,8 @@ public abstract class SortedDocValues extends BinaryDocValues {
/**
* Returns the ordinal for the current docID.
+ * It is illegal to call this method after {@link #advanceExact(int)}
+ * returned {@code false}.
* @return ordinal for the document: this is dense, starts at 0, then
* increments by 1 for the next value in sorted order.
*/
@@ -107,4 +109,5 @@ public abstract class SortedDocValues extends BinaryDocValues {
public TermsEnum termsEnum() throws IOException {
return new SortedDocValuesTermsEnum(this);
}
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
index 885ee89..7e43e49 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
@@ -166,6 +166,11 @@ class SortedDocValuesWriter extends DocValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long cost() {
return docsWithField.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
index 8c11495..a76b46d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
@@ -18,14 +18,12 @@ package org.apache.lucene.index;
import java.io.IOException;
-import org.apache.lucene.search.DocIdSetIterator;
-
/**
* A list of per-document numeric values, sorted
* according to {@link Long#compare(long, long)}.
*/
-public abstract class SortedNumericDocValues extends DocIdSetIterator {
+public abstract class SortedNumericDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@@ -40,6 +38,8 @@ public abstract class SortedNumericDocValues extends DocIdSetIterator {
/**
* Retrieves the number of values for the current document. This must always
* be greater than zero.
+ * It is illegal to call this method after {@link #advanceExact(int)}
+ * returned {@code false}.
*/
public abstract int docValueCount();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
index e154547..3f50623 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
@@ -155,6 +155,11 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public int docValueCount() {
return valueCount;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
index 439843b..6d02c25 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.IOException;
-import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
/**
@@ -30,7 +29,7 @@ import org.apache.lucene.util.BytesRef;
* dictionary value (ordinal) can be retrieved for each document. Ordinals
* are dense and in increasing sorted order.
*/
-public abstract class SortedSetDocValues extends DocIdSetIterator {
+public abstract class SortedSetDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@@ -43,6 +42,8 @@ public abstract class SortedSetDocValues extends DocIdSetIterator {
/**
* Returns the next ordinal for the current document.
+ * It is illegal to call this method after {@link #advanceExact(int)}
+ * returned {@code false}.
* @return next ordinal for the document, or {@link #NO_MORE_ORDS}.
* ordinals are dense, start at 0, then increment by 1 for
* the next value in sorted order.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
index e7d915f..35157d4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
@@ -226,6 +226,11 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
public long getValueCount() {
return ordMap.length;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
index 8139ed1..4fb5027 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
@@ -186,6 +186,12 @@ class SortingLeafReader extends FilterLeafReader {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return dvs.docsWithField.get(target);
+ }
+
+ @Override
public BytesRef binaryValue() {
return dvs.values[docID];
}
@@ -255,6 +261,12 @@ class SortingLeafReader extends FilterLeafReader {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return dvs.docsWithField.get(target);
+ }
+
+ @Override
public long longValue() {
return dvs.values[docID];
}
@@ -395,6 +407,12 @@ class SortingLeafReader extends FilterLeafReader {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return ords[target] != -1;
+ }
+
+ @Override
public int ordValue() {
return ords[docID];
}
@@ -468,6 +486,13 @@ class SortingLeafReader extends FilterLeafReader {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ ordUpto = 0;
+ return ords[docID] != null;
+ }
+
+ @Override
public long nextOrd() {
if (ordUpto == ords[docID].length) {
return NO_MORE_ORDS;
@@ -539,6 +564,13 @@ class SortingLeafReader extends FilterLeafReader {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ upto = 0;
+ return values[docID] != null;
+ }
+
+ @Override
public long nextValue() {
if (upto == values[docID].length) {
throw new AssertionError();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
index b6c17c0..8216201 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
@@ -178,11 +178,7 @@ public abstract class FieldComparator<T> {
}
private double getValueForDoc(int doc) throws IOException {
- int curDocID = currentReaderValues.docID();
- if (doc > curDocID) {
- curDocID = currentReaderValues.advance(doc);
- }
- if (doc == curDocID) {
+ if (currentReaderValues.advanceExact(doc)) {
return Double.longBitsToDouble(currentReaderValues.longValue());
} else {
return missingValue;
@@ -242,11 +238,7 @@ public abstract class FieldComparator<T> {
}
private float getValueForDoc(int doc) throws IOException {
- int curDocID = currentReaderValues.docID();
- if (doc > curDocID) {
- curDocID = currentReaderValues.advance(doc);
- }
- if (doc == curDocID) {
+ if (currentReaderValues.advanceExact(doc)) {
return Float.intBitsToFloat((int) currentReaderValues.longValue());
} else {
return missingValue;
@@ -308,11 +300,7 @@ public abstract class FieldComparator<T> {
}
private int getValueForDoc(int doc) throws IOException {
- int curDocID = currentReaderValues.docID();
- if (doc > curDocID) {
- curDocID = currentReaderValues.advance(doc);
- }
- if (doc == curDocID) {
+ if (currentReaderValues.advanceExact(doc)) {
return (int) currentReaderValues.longValue();
} else {
return missingValue;
@@ -372,11 +360,7 @@ public abstract class FieldComparator<T> {
}
private long getValueForDoc(int doc) throws IOException {
- int curDocID = currentReaderValues.docID();
- if (doc > curDocID) {
- curDocID = currentReaderValues.advance(doc);
- }
- if (doc == curDocID) {
+ if (currentReaderValues.advanceExact(doc)) {
return currentReaderValues.longValue();
} else {
return missingValue;
@@ -656,15 +640,11 @@ public abstract class FieldComparator<T> {
}
private int getOrdForDoc(int doc) throws IOException {
- int curDocID = termsIndex.docID();
- if (doc > curDocID) {
- if (termsIndex.advance(doc) == doc) {
- return termsIndex.ordValue();
- }
- } else if (doc == curDocID) {
+ if (termsIndex.advanceExact(doc)) {
return termsIndex.ordValue();
+ } else {
+ return -1;
}
- return -1;
}
@Override
@@ -864,11 +844,7 @@ public abstract class FieldComparator<T> {
}
private BytesRef getValueForDoc(int doc) throws IOException {
- int curDocID = docTerms.docID();
- if (doc > curDocID) {
- curDocID = docTerms.advance(doc);
- }
- if (doc == curDocID) {
+ if (docTerms.advanceExact(doc)) {
return docTerms.binaryValue();
} else {
return null;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
index 43e97e7..705454e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
@@ -132,6 +132,15 @@ public class SortedNumericSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ value = in.nextValue();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
@@ -182,6 +191,15 @@ public class SortedNumericSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ setValue();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java b/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
index f10dbf7..2d6c351 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
@@ -118,6 +118,15 @@ public class SortedSetSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ setOrd();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
@@ -180,6 +189,15 @@ public class SortedSetSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ setOrd();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
@@ -249,6 +267,15 @@ public class SortedSetSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ setOrd();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
@@ -330,6 +357,15 @@ public class SortedSetSelector {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ if (in.advanceExact(target)) {
+ setOrd();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
public long cost() {
return in.cost();
}
[14/50] [abbrv] lucene-solr:jira/solr-8593: Fixed Interval Facet
count issue in cases of open/close intervals on the same values
Posted by kr...@apache.org.
Fixed Interval Facet count issue in cases of open/close intervals on the same values
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ce57e8a8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ce57e8a8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ce57e8a8
Branch: refs/heads/jira/solr-8593
Commit: ce57e8a8f4274db9ad1a78f06d37a7c9e02b3fb8
Parents: c9132ac
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Mon Oct 24 19:49:54 2016 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Mon Oct 24 19:49:54 2016 -0700
----------------------------------------------------------------------
solr/CHANGES.txt | 5 ++++-
.../java/org/apache/solr/request/IntervalFacets.java | 12 +++++++++++-
.../org/apache/solr/request/TestIntervalFaceting.java | 7 ++++++-
3 files changed, 21 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce57e8a8/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4355b80..475ba7f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -208,6 +208,9 @@ Bug Fixes
* SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK (Ishan Chattopadhyaya,via noble)
+* SOLR-9687: Fixed Interval Facet count issue in cases of open/close intervals on the same values
+ (Andy Chillrud, Tomás Fernández Löbbe)
+
Optimizations
----------------------
@@ -3290,7 +3293,7 @@ Bug Fixes
while accessing other collections. (Shai Erera)
* SOLR-7412: Fixed range.facet.other parameter for distributed requests.
- (Will Miller, Tomás Fernándes Löbbe)
+ (Will Miller, Tomás Fernández Löbbe)
* SOLR-6087: SolrIndexSearcher makes no DelegatingCollector.finish() call when IndexSearcher
throws an expected exception. (Christine Poerschke via shalin)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce57e8a8/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
index dfe0f77..14bf700 100644
--- a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
@@ -157,7 +157,17 @@ public class IntervalFacets implements Iterable<FacetInterval> {
if (o2.start == null) {
return 1;
}
- return o1.start.compareTo(o2.start);
+ int startComparison = o1.start.compareTo(o2.start);
+ if (startComparison == 0) {
+ if (o1.startOpen != o2.startOpen) {
+ if (!o1.startOpen) {
+ return -1;
+ } else {
+ return 1;
+ }
+ }
+ }
+ return startComparison;
}
});
return sortedIntervals;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ce57e8a8/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java
index 68eac48..5a4510f 100644
--- a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java
+++ b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java
@@ -943,6 +943,9 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
assertIntervalQuery(field, "(0,2]", "2");
assertIntervalQuery(field, "[*,5]", "6");
assertIntervalQuery(field, "[*,3)", "3", "[2,5)", "3", "[6,8)", "2", "[3,*]", "7", "[10,10]", "1", "[10,10]", "1", "[10,10]", "1");
+ assertIntervalQuery(field, "(5,*]", "4", "[5,5]", "1", "(*,5)", "5");
+ assertIntervalQuery(field, "[5,5]", "1", "(*,5)", "5", "(5,*]", "4");
+ assertIntervalQuery(field, "(5,*]", "4", "(*,5)", "5", "[5,5]", "1");
}
@@ -955,7 +958,9 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 {
assertIntervalQuery(field, "[*,bird)", "2", "[bird,cat)", "1", "[cat,dog)", "2", "[dog,*]", "4");
assertIntervalQuery(field, "[*,*]", "9", "[*,dog)", "5", "[*,dog]", "8", "[dog,*]", "4");
assertIntervalQuery(field, field + ":dog", 3, "[*,*]", "3", "[*,dog)", "0", "[*,dog]", "3", "[dog,*]", "3", "[bird,cat]", "0");
-
+ assertIntervalQuery(field, "(*,dog)", "5", "[dog, dog]", "3", "(dog,*)", "1");
+ assertIntervalQuery(field, "[dog, dog]", "3", "(dog,*)", "1", "(*,dog)", "5");
+ assertIntervalQuery(field, "(dog,*)", "1", "(*,dog)", "5", "[dog, dog]", "3");
}
/**
[05/50] [abbrv] lucene-solr:jira/solr-8593: Merge remote-tracking
branch 'origin/master'
Posted by kr...@apache.org.
Merge remote-tracking branch 'origin/master'
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9de11d0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9de11d0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9de11d0
Branch: refs/heads/jira/solr-8593
Commit: c9de11d02464a146c6ab2aa561622876d081a070
Parents: 184b0f2 9aca4c9
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 16:45:53 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 16:45:53 2016 +0530
----------------------------------------------------------------------
.../codecs/lucene53/Lucene53NormsProducer.java | 6 +
.../lucene54/Lucene54DocValuesProducer.java | 65 +++-
.../lucene54/TestLucene54DocValuesFormat.java | 5 +-
.../simpletext/SimpleTextDocValuesReader.java | 95 +++++-
.../simpletext/SimpleTextDocValuesWriter.java | 9 +
.../apache/lucene/codecs/DocValuesConsumer.java | 25 ++
.../org/apache/lucene/codecs/NormsConsumer.java | 5 +
.../lucene/codecs/lucene70/IndexedDISI.java | 88 +++--
.../lucene70/Lucene70DocValuesProducer.java | 62 ++++
.../codecs/lucene70/Lucene70NormsProducer.java | 11 +
.../apache/lucene/index/BinaryDocValues.java | 5 +-
.../lucene/index/BinaryDocValuesWriter.java | 5 +
.../org/apache/lucene/index/CheckIndex.java | 81 ++++-
.../java/org/apache/lucene/index/DocValues.java | 95 +++---
.../apache/lucene/index/DocValuesIterator.java | 33 ++
.../lucene/index/FilterBinaryDocValues.java | 5 +
.../lucene/index/FilterNumericDocValues.java | 5 +
.../index/LegacyBinaryDocValuesWrapper.java | 8 +
.../index/LegacyNumericDocValuesWrapper.java | 9 +
.../index/LegacySortedDocValuesWrapper.java | 9 +
.../LegacySortedNumericDocValuesWrapper.java | 9 +
.../index/LegacySortedSetDocValuesWrapper.java | 10 +
.../org/apache/lucene/index/MultiDocValues.java | 125 +++++++
.../apache/lucene/index/NormValuesWriter.java | 5 +
.../apache/lucene/index/NumericDocValues.java | 7 +-
.../lucene/index/NumericDocValuesWriter.java | 5 +
.../apache/lucene/index/ReadersAndUpdates.java | 10 +
.../index/SingletonSortedNumericDocValues.java | 24 +-
.../index/SingletonSortedSetDocValues.java | 18 +-
.../apache/lucene/index/SortedDocValues.java | 3 +
.../lucene/index/SortedDocValuesWriter.java | 5 +
.../lucene/index/SortedNumericDocValues.java | 6 +-
.../index/SortedNumericDocValuesWriter.java | 5 +
.../apache/lucene/index/SortedSetDocValues.java | 5 +-
.../lucene/index/SortedSetDocValuesWriter.java | 5 +
.../apache/lucene/index/SortingLeafReader.java | 32 ++
.../apache/lucene/search/FieldComparator.java | 40 +--
.../lucene/search/SortedNumericSelector.java | 18 +
.../apache/lucene/search/SortedSetSelector.java | 36 ++
.../search/similarities/BM25Similarity.java | 8 +-
.../search/similarities/SimilarityBase.java | 6 +-
.../search/similarities/TFIDFSimilarity.java | 8 +-
.../lucene/codecs/lucene70/TestIndexedDISI.java | 28 +-
.../lucene70/TestLucene70DocValuesFormat.java | 4 +-
.../SortedSetDocValuesFacetCounts.java | 15 +-
.../lucene/search/join/BlockJoinSelector.java | 104 +++++-
.../search/join/GenericTermsCollector.java | 7 +
.../search/join/TestBlockJoinSelector.java | 12 +
.../apache/lucene/index/memory/MemoryIndex.java | 6 +
.../search/TestDiversifiedTopDocsCollector.java | 9 +
.../lucene/index/AssertingLeafReader.java | 89 ++++-
.../index/BaseDocValuesFormatTestCase.java | 331 ++++++++++++-------
.../index/BaseIndexFileFormatTestCase.java | 12 +
.../lucene/index/BaseNormsFormatTestCase.java | 101 +++---
.../apache/solr/request/DocValuesFacets.java | 20 +-
.../request/PerSegmentSingleValuedFaceting.java | 10 +-
.../apache/solr/search/SolrIndexSearcher.java | 6 +-
.../facet/FacetFieldProcessorByArrayDV.java | 30 +-
.../apache/solr/uninverting/FieldCacheImpl.java | 18 +
59 files changed, 1405 insertions(+), 413 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9de11d0/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
[18/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9536:
OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
Posted by kr...@apache.org.
SOLR-9536: OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e152575f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e152575f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e152575f
Branch: refs/heads/jira/solr-8593
Commit: e152575f5ea5ea798ca989c852afb763189dee60
Parents: 27ba8e2
Author: markrmiller <ma...@apache.org>
Authored: Tue Oct 25 12:39:37 2016 -0400
Committer: markrmiller <ma...@apache.org>
Committed: Tue Oct 25 12:39:59 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../core/src/java/org/apache/solr/handler/OldBackupDirectory.java | 2 +-
2 files changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e152575f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2f4827b..b693543 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -214,6 +214,9 @@ Bug Fixes
* SOLR-9441: Solr collection backup on HDFS can only be manipulated by the Solr process owner.
(Hrishikesh Gadre via Mark Miller)
+* SOLR-9536: OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
+ (Hrishikesh Gadre via Mark Miller)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e152575f/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java b/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
index 2b19116..79c5f09 100644
--- a/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
+++ b/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
@@ -32,7 +32,7 @@ class OldBackupDirectory implements Comparable<OldBackupDirectory> {
private URI basePath;
private String dirName;
- private Optional<Date> timestamp;
+ private Optional<Date> timestamp = Optional.empty();
public OldBackupDirectory(URI basePath, String dirName) {
this.dirName = Preconditions.checkNotNull(dirName);
[36/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9481: Fix test
errors by using coreContainer.getSolrHome instead of
SolrResourceLoader.locateSolrHome() in SecurityConfHandlerLocal
Posted by kr...@apache.org.
SOLR-9481: Fix test errors by using coreContainer.getSolrHome instead of SolrResourceLoader.locateSolrHome() in SecurityConfHandlerLocal
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/24446f50
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/24446f50
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/24446f50
Branch: refs/heads/jira/solr-8593
Commit: 24446f5085468627136e38ca8f874f383be9d3f3
Parents: feb1a5d
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 16:39:22 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 16:39:22 2016 +0200
----------------------------------------------------------------------
.../org/apache/solr/handler/admin/SecurityConfHandlerLocal.java | 3 ++-
.../solr/handler/admin/SecurityConfHandlerLocalForTesting.java | 4 ----
.../test/org/apache/solr/security/BasicAuthStandaloneTest.java | 1 -
3 files changed, 2 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/24446f50/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
index d6745c5..985a070 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
@@ -23,6 +23,7 @@ import java.io.OutputStream;
import java.lang.invoke.MethodHandles;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Collections;
import org.apache.solr.common.SolrException;
@@ -43,7 +44,7 @@ public class SecurityConfHandlerLocal extends SecurityConfHandler {
public SecurityConfHandlerLocal(CoreContainer coreContainer) {
super(coreContainer);
- securityJsonPath = SolrResourceLoader.locateSolrHome().resolve("security.json");
+ securityJsonPath = Paths.get(coreContainer.getSolrHome()).resolve("security.json");
}
/**
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/24446f50/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
index 92a18b1..e56dd5b 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
@@ -20,20 +20,16 @@ package org.apache.solr.handler.admin;
import java.io.IOException;
import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrResourceLoader;
/**
* Wrapper for use in tests
*/
public class SecurityConfHandlerLocalForTesting extends SecurityConfHandlerLocal {
-
public SecurityConfHandlerLocalForTesting(CoreContainer coreContainer) {
super(coreContainer);
}
public boolean persistConf(SecurityConfig securityConfig) throws IOException {
- // Set JSON_PATH again since the test may have
- securityJsonPath = SolrResourceLoader.locateSolrHome().resolve("security.json");
return super.persistConf(securityConfig);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/24446f50/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
index 829ce9c..33c0ab3 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
@@ -70,7 +70,6 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
super.setUp();
instance = new SolrInstance("inst", null);
instance.setUp();
- System.setProperty("solr.solr.home", instance.getHomeDir().toString());
jetty = createJetty(instance);
initCore("solrconfig.xml", "schema.xml", instance.getHomeDir().toString());
securityConfHandler = new SecurityConfHandlerLocalForTesting(jetty.getCoreContainer());
[02/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7462: Give doc
values APIs an `advanceExact` method.
Posted by kr...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
index 0cb86db..ff390b3 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
@@ -244,11 +244,7 @@ public class BM25Similarity extends Similarity {
if (norms == null) {
norm = k1;
} else {
- int normsDocID = norms.docID();
- if (normsDocID < doc) {
- normsDocID = norms.advance(doc);
- }
- if (normsDocID == doc) {
+ if (norms.advanceExact(doc)) {
norm = cache[(byte)norms.longValue() & 0xFF];
} else {
norm = cache[0];
@@ -310,7 +306,7 @@ public class BM25Similarity extends Similarity {
"tfNorm, computed from:", subs);
} else {
byte norm;
- if (norms.advance(doc) == doc) {
+ if (norms.advanceExact(doc)) {
norm = (byte) norms.longValue();
} else {
norm = 0;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
index ed837c4..925dc59 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
@@ -279,11 +279,7 @@ public abstract class SimilarityBase extends Similarity {
if (norms == null) {
return 1F;
}
- int normsDocID = norms.docID();
- if (normsDocID < doc) {
- normsDocID = norms.advance(doc);
- }
- if (normsDocID == doc) {
+ if (norms.advanceExact(doc)) {
return decodeNormValue((byte) norms.longValue());
} else {
return decodeNormValue((byte) 0);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
index 6cd87b5..cd8acd6 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
@@ -599,11 +599,7 @@ public abstract class TFIDFSimilarity extends Similarity {
return raw;
} else {
long normValue;
- int normsDocID = norms.docID();
- if (normsDocID < doc) {
- normsDocID = norms.advance(doc);
- }
- if (normsDocID == doc) {
+ if (norms.advanceExact(doc)) {
normValue = norms.longValue();
} else {
normValue = 0;
@@ -649,7 +645,7 @@ public abstract class TFIDFSimilarity extends Similarity {
private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) throws IOException {
Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
float norm;
- if (norms != null && norms.advance(doc) == doc) {
+ if (norms != null && norms.advanceExact(doc)) {
norm = decodeNormValue(norms.longValue());
} else {
norm = 1f;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
index 18b4590..64bfbd5 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
@@ -153,7 +153,7 @@ public class TestIndexedDISI extends LuceneTestCase {
public void testRandom() throws IOException {
try (Directory dir = newDirectory()) {
- for (int i = 0; i < 100; ++i) {
+ for (int i = 0; i < 10; ++i) {
doTestRandom(dir);
}
}
@@ -217,6 +217,32 @@ public class TestIndexedDISI extends LuceneTestCase {
}
}
+ for (int step : new int[] {10, 100, 1000, 10000, 100000}) {
+ try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
+ IndexedDISI disi = new IndexedDISI(in, 0L, length, cardinality);
+ BitSetIterator disi2 = new BitSetIterator(set, cardinality);
+ int index = -1;
+ for (int target = 0; target < set.length(); ) {
+ target += TestUtil.nextInt(random(), 0, step);
+ int doc = disi2.docID();
+ while (doc < target) {
+ doc = disi2.nextDoc();
+ index++;
+ }
+
+ boolean exists = disi.advanceExact(target);
+ assertEquals(doc == target, exists);
+ if (exists) {
+ assertEquals(index, disi.index());
+ } else if (random().nextBoolean()) {
+ assertEquals(doc, disi.nextDoc());
+ assertEquals(index, disi.index());
+ target = doc;
+ }
+ }
+ }
+ }
+
dir.deleteFile("foo");
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
index 5ad701e..8661298 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
@@ -104,7 +104,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestSortedVsStoredFields(atLeast(300), 1, 32766);
+ doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
}
}
@@ -112,7 +112,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthManyVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
+ doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
index 1219494..4fff6a6 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
@@ -199,10 +199,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
//System.out.println(" doc=" + doc);
- if (doc > segValues.docID()) {
- segValues.advance(doc);
- }
- if (doc == segValues.docID()) {
+ if (segValues.advanceExact(doc)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
@@ -219,10 +216,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
//System.out.println(" doc=" + doc);
- if (doc > segValues.docID()) {
- segValues.advance(doc);
- }
- if (doc == segValues.docID()) {
+ if (segValues.advanceExact(doc)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" ord=" + term);
@@ -246,10 +240,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
// just aggregate directly into counts:
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > segValues.docID()) {
- segValues.advance(doc);
- }
- if (doc == segValues.docID()) {
+ if (segValues.advanceExact(doc)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
counts[term]++;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java b/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
index a2e0c55..359b3cb 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
@@ -102,7 +102,7 @@ public class BlockJoinSelector {
}
return new SortedDocValues() {
- private int ord;
+ private int ord = -1;
private int docID = -1;
@Override
@@ -169,6 +169,60 @@ public class BlockJoinSelector {
}
@Override
+ public boolean advanceExact(int targetParentDocID) throws IOException {
+ if (targetParentDocID < docID) {
+ throw new IllegalArgumentException("target must be after the current document: current=" + docID + " target=" + targetParentDocID);
+ }
+ int previousDocId = docID;
+ docID = targetParentDocID;
+ if (targetParentDocID == previousDocId) {
+ return ord != -1;
+ }
+ docID = targetParentDocID;
+ ord = -1;
+ if (parents.get(targetParentDocID) == false) {
+ return false;
+ }
+ int prevParentDocId = docID == 0 ? -1 : parents.prevSetBit(docID - 1);
+ int childDoc = values.docID();
+ if (childDoc <= prevParentDocId) {
+ childDoc = values.advance(prevParentDocId + 1);
+ }
+ if (childDoc >= docID) {
+ return false;
+ }
+
+ boolean hasValue = false;
+ for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
+ if (children.get(doc)) {
+ ord = values.ordValue();
+ hasValue = true;
+ values.nextDoc();
+ break;
+ }
+ }
+ if (hasValue == false) {
+ return false;
+ }
+
+ for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
+ if (children.get(doc)) {
+ switch (selection) {
+ case MIN:
+ ord = Math.min(ord, values.ordValue());
+ break;
+ case MAX:
+ ord = Math.max(ord, values.ordValue());
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+ }
+ return true;
+ }
+
+ @Override
public int ordValue() {
return ord;
}
@@ -288,6 +342,54 @@ public class BlockJoinSelector {
}
@Override
+ public boolean advanceExact(int targetParentDocID) throws IOException {
+ if (targetParentDocID <= parentDocID) {
+ throw new IllegalArgumentException("target must be after the current document: current=" + parentDocID + " target=" + targetParentDocID);
+ }
+ parentDocID = targetParentDocID;
+ if (parents.get(targetParentDocID) == false) {
+ return false;
+ }
+ int prevParentDocId = parentDocID == 0 ? -1 : parents.prevSetBit(parentDocID - 1);
+ int childDoc = values.docID();
+ if (childDoc <= prevParentDocId) {
+ childDoc = values.advance(prevParentDocId + 1);
+ }
+ if (childDoc >= parentDocID) {
+ return false;
+ }
+
+ boolean hasValue = false;
+ for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
+ if (children.get(doc)) {
+ value = values.longValue();
+ hasValue = true;
+ values.nextDoc();
+ break;
+ }
+ }
+ if (hasValue == false) {
+ return false;
+ }
+
+ for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
+ if (children.get(doc)) {
+ switch (selection) {
+ case MIN:
+ value = Math.min(value, values.longValue());
+ break;
+ case MAX:
+ value = Math.max(value, values.longValue());
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+ }
+ return true;
+ }
+
+ @Override
public long longValue() {
return value;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
index 3ad0fe3..47b1b62 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
@@ -74,6 +74,13 @@ interface GenericTermsCollector extends Collector {
}
@Override
+ public boolean advanceExact(int dest) throws IOException {
+ boolean exists = target.advanceExact(dest);
+ out.println("\nadvanceExact(" + dest + ") -> exists# "+exists);
+ return exists;
+ }
+
+ @Override
public long cost() {
return target.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
index 41f994c..04cb771 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
@@ -150,6 +150,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return ords[docID] != -1;
+ }
+
+ @Override
public int ordValue() {
assert ords[docID] != -1;
return ords[docID];
@@ -257,6 +263,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docsWithValue.get(docID);
+ }
+
+ @Override
public long longValue() {
return values[docID];
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index ccbbf24..218d26c 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -970,6 +970,12 @@ public class MemoryIndex {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docID == 0;
+ }
+
+ @Override
public long cost() {
return 1;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index b64afc5..043141a 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -144,6 +144,11 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return sdv.advanceExact(target + context.docBase);
+ }
+
+ @Override
public long cost() {
return 0;
}
@@ -187,6 +192,10 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
return vals.advance(target);
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ return vals.advanceExact(target);
+ }
+ @Override
public long cost() {
return vals.cost();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
index 6686ec4..37c549e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
@@ -398,6 +398,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final NumericDocValues in;
private final int maxDoc;
private int lastDocID = -1;
+ private boolean exists;
public AssertingNumericDocValues(NumericDocValues in, int maxDoc) {
this.in = in;
@@ -420,6 +421,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@@ -432,10 +434,23 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ assertThread("Numeric doc values", creationThread);
+ assert target >= 0;
+ assert target >= in.docID();
+ assert target < maxDoc;
+ exists = in.advanceExact(target);
+ assert in.docID() == target;
+ lastDocID = target;
+ return exists;
+ }
+
+ @Override
public long cost() {
assertThread("Numeric doc values", creationThread);
long cost = in.cost();
@@ -446,8 +461,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public long longValue() throws IOException {
assertThread("Numeric doc values", creationThread);
- assert in.docID() != -1;
- assert in.docID() != NO_MORE_DOCS;
+ assert exists;
return in.longValue();
}
@@ -463,6 +477,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final BinaryDocValues in;
private final int maxDoc;
private int lastDocID = -1;
+ private boolean exists;
public AssertingBinaryDocValues(BinaryDocValues in, int maxDoc) {
this.in = in;
@@ -485,6 +500,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@@ -497,10 +513,23 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ assertThread("Numeric doc values", creationThread);
+ assert target >= 0;
+ assert target >= in.docID();
+ assert target < maxDoc;
+ exists = in.advanceExact(target);
+ assert in.docID() == target;
+ lastDocID = target;
+ return exists;
+ }
+
+ @Override
public long cost() {
assertThread("Binary doc values", creationThread);
long cost = in.cost();
@@ -511,8 +540,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public BytesRef binaryValue() throws IOException {
assertThread("Binary doc values", creationThread);
- assert in.docID() != -1;
- assert in.docID() != NO_MORE_DOCS;
+ assert exists;
return in.binaryValue();
}
@@ -529,6 +557,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private final int valueCount;
private int lastDocID = -1;
+ private boolean exists;
public AssertingSortedDocValues(SortedDocValues in, int maxDoc) {
this.in = in;
@@ -551,6 +580,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@@ -563,10 +593,23 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ assertThread("Numeric doc values", creationThread);
+ assert target >= 0;
+ assert target >= in.docID();
+ assert target < maxDoc;
+ exists = in.advanceExact(target);
+ assert in.docID() == target;
+ lastDocID = target;
+ return exists;
+ }
+
+ @Override
public long cost() {
assertThread("Sorted doc values", creationThread);
long cost = in.cost();
@@ -577,6 +620,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public int ordValue() {
assertThread("Sorted doc values", creationThread);
+ assert exists;
int ord = in.ordValue();
assert ord >= -1 && ord < valueCount;
return ord;
@@ -625,6 +669,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private int lastDocID = -1;
private int valueUpto;
+ private boolean exists;
public AssertingSortedNumericDocValues(SortedNumericDocValues in, int maxDoc) {
this.in = in;
@@ -645,6 +690,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == in.docID();
lastDocID = docID;
valueUpto = 0;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@@ -659,10 +705,24 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
valueUpto = 0;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ assertThread("Numeric doc values", creationThread);
+ assert target >= 0;
+ assert target >= in.docID();
+ assert target < maxDoc;
+ exists = in.advanceExact(target);
+ assert in.docID() == target;
+ lastDocID = target;
+ valueUpto = 0;
+ return exists;
+ }
+
+ @Override
public long cost() {
assertThread("Sorted numeric doc values", creationThread);
long cost = in.cost();
@@ -673,6 +733,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public long nextValue() throws IOException {
assertThread("Sorted numeric doc values", creationThread);
+ assert exists;
assert valueUpto < in.docValueCount(): "valueUpto=" + valueUpto + " in.docValueCount()=" + in.docValueCount();
valueUpto++;
return in.nextValue();
@@ -681,6 +742,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public int docValueCount() {
assertThread("Sorted numeric doc values", creationThread);
+ assert exists;
assert in.docValueCount() > 0;
return in.docValueCount();
}
@@ -693,7 +755,8 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private final long valueCount;
private int lastDocID = -1;
- long lastOrd = NO_MORE_ORDS;
+ private long lastOrd = NO_MORE_ORDS;
+ private boolean exists;
public AssertingSortedSetDocValues(SortedSetDocValues in, int maxDoc) {
this.in = in;
@@ -717,6 +780,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == in.docID();
lastDocID = docID;
lastOrd = -2;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@@ -731,10 +795,24 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
lastOrd = -2;
+ exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ assertThread("Numeric doc values", creationThread);
+ assert target >= 0;
+ assert target >= in.docID();
+ assert target < maxDoc;
+ exists = in.advanceExact(target);
+ assert in.docID() == target;
+ lastDocID = target;
+ lastOrd = -2;
+ return exists;
+ }
+
+ @Override
public long cost() {
assertThread("Sorted set doc values", creationThread);
long cost = in.cost();
@@ -746,6 +824,7 @@ public class AssertingLeafReader extends FilterLeafReader {
public long nextOrd() throws IOException {
assertThread("Sorted set doc values", creationThread);
assert lastOrd != NO_MORE_ORDS;
+ assert exists;
long ord = in.nextOrd();
assert ord < valueCount;
assert ord == NO_MORE_ORDS || ord > lastOrd;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
index b9bf745..d55f212 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
@@ -30,6 +30,8 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
+import java.util.function.LongSupplier;
+import java.util.function.Supplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -556,7 +558,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
- BytesRef scratch = new BytesRef();
for(int i=0;i<2;i++) {
Document doc2 = ireader.leaves().get(0).reader().document(i);
String expected;
@@ -1185,20 +1186,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
dir.close();
}
- static abstract class LongProducer {
- abstract long next();
- }
-
- private void doTestNumericsVsStoredFields(final long minValue, final long maxValue) throws Exception {
- doTestNumericsVsStoredFields(new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), minValue, maxValue);
- }
- });
- }
-
- private void doTestNumericsVsStoredFields(LongProducer longs) throws Exception {
+ private void doTestNumericsVsStoredFields(double density, LongSupplier longs) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1216,8 +1204,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// for numbers of values <= 256, all storage layouts are tested
assert numDocs > 256;
for (int i = 0; i < numDocs; i++) {
+ if (random().nextDouble() > density) {
+ writer.addDocument(new Document());
+ continue;
+ }
idField.setStringValue(Integer.toString(i));
- long value = longs.next();
+ long value = longs.getAsLong();
storedField.setStringValue(Long.toString(value));
dvField.setLongValue(value);
writer.addDocument(doc);
@@ -1241,20 +1233,28 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = DirectoryReader.open(dir);
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
- NumericDocValues docValues = r.getNumericDocValues("dv");
+ NumericDocValues docValues = DocValues.getNumeric(r, "dv");
+ docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
- long storedValue = Long.parseLong(r.document(i).get("stored"));
- assertEquals(i, docValues.nextDoc());
- assertEquals(storedValue, docValues.longValue());
+ String storedValue = r.document(i).get("stored");
+ if (storedValue == null) {
+ assertTrue(docValues.docID() > i);
+ } else {
+ assertEquals(i, docValues.docID());
+ assertEquals(Long.parseLong(storedValue), docValues.longValue());
+ docValues.nextDoc();
+ }
}
+ assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
dir.close();
}
- private void doTestSortedNumericsVsStoredFields(LongProducer counts, LongProducer values) throws Exception {
+ private void doTestSortedNumericsVsStoredFields(LongSupplier counts, LongSupplier values) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1268,10 +1268,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
Document doc = new Document();
doc.add(new StringField("id", Integer.toString(i), Field.Store.NO));
- int valueCount = (int) counts.next();
+ int valueCount = (int) counts.getAsLong();
long valueArray[] = new long[valueCount];
for (int j = 0; j < valueCount; j++) {
- long value = values.next();
+ long value = values.getAsLong();
valueArray[j] = value;
doc.add(new SortedNumericDocValuesField("dv", value));
}
@@ -1300,6 +1300,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = DirectoryReader.open(dir);
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedNumericDocValues docValues = DocValues.getSortedNumeric(r, "dv");
@@ -1326,39 +1327,74 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
public void testBooleanNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestNumericsVsStoredFields(0, 1);
+ doTestNumericsVsStoredFields(1, () -> random().nextInt(2));
}
}
-
+
+ public void testSparseBooleanNumericsVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestNumericsVsStoredFields(random().nextDouble(), () -> random().nextInt(2));
+ }
+ }
+
public void testByteNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestNumericsVsStoredFields(Byte.MIN_VALUE, Byte.MAX_VALUE);
+ doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
}
}
-
+
+ public void testSparseByteNumericsVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
+ }
+ }
+
public void testShortNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestNumericsVsStoredFields(Short.MIN_VALUE, Short.MAX_VALUE);
+ doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
}
}
-
+
+ public void testSparseShortNumericsVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
+ }
+ }
+
public void testIntNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestNumericsVsStoredFields(Integer.MIN_VALUE, Integer.MAX_VALUE);
+ doTestNumericsVsStoredFields(1, random()::nextInt);
+ }
+ }
+
+ public void testSparseIntNumericsVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestNumericsVsStoredFields(random().nextDouble(), random()::nextInt);
}
}
public void testLongNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestNumericsVsStoredFields(Long.MIN_VALUE, Long.MAX_VALUE);
+ doTestNumericsVsStoredFields(1, random()::nextLong);
}
}
- private void doTestBinaryVsStoredFields(int minLength, int maxLength) throws Exception {
+ public void testSparseLongNumericsVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestNumericsVsStoredFields(random().nextDouble(), random()::nextLong);
+ }
+ }
+
+ private void doTestBinaryVsStoredFields(double density, Supplier<byte[]> bytes) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1373,15 +1409,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// index some docs
int numDocs = atLeast(300);
for (int i = 0; i < numDocs; i++) {
- idField.setStringValue(Integer.toString(i));
- final int length;
- if (minLength == maxLength) {
- length = minLength; // fixed length
- } else {
- length = TestUtil.nextInt(random(), minLength, maxLength);
+ if (random().nextDouble() > density) {
+ writer.addDocument(new Document());
+ continue;
}
- byte buffer[] = new byte[length];
- random().nextBytes(buffer);
+ idField.setStringValue(Integer.toString(i));
+ byte[] buffer = bytes.get();
storedField.setBytesValue(buffer);
dvField.setBytesValue(buffer);
writer.addDocument(doc);
@@ -1399,28 +1432,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
- BinaryDocValues docValues = r.getBinaryDocValues("dv");
+ BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+ docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
- assertEquals(i, docValues.nextDoc());
- assertEquals(binaryValue, docValues.binaryValue());
+ if (binaryValue == null) {
+ assertTrue(docValues.docID() > i);
+ } else {
+ assertEquals(i, docValues.docID());
+ assertEquals(binaryValue, docValues.binaryValue());
+ docValues.nextDoc();
+ }
}
+ assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
// compare again
writer.forceMerge(1);
ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
- BinaryDocValues docValues = r.getBinaryDocValues("dv");
+ BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+ docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
- assertEquals(i, docValues.nextDoc());
- assertEquals(binaryValue, docValues.binaryValue());
+ if (binaryValue == null) {
+ assertTrue(docValues.docID() > i);
+ } else {
+ assertEquals(i, docValues.docID());
+ assertEquals(binaryValue, docValues.binaryValue());
+ docValues.nextDoc();
+ }
}
+ assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.close();
@@ -1428,21 +1477,46 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
}
public void testBinaryFixedLengthVsStoredFields() throws Exception {
+ doTestBinaryFixedLengthVsStoredFields(1);
+ }
+
+ public void testSparseBinaryFixedLengthVsStoredFields() throws Exception {
+ doTestBinaryFixedLengthVsStoredFields(random().nextDouble());
+ }
+
+ private void doTestBinaryFixedLengthVsStoredFields(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
int fixedLength = TestUtil.nextInt(random(), 0, 10);
- doTestBinaryVsStoredFields(fixedLength, fixedLength);
+ doTestBinaryVsStoredFields(density, () -> {
+ byte buffer[] = new byte[fixedLength];
+ random().nextBytes(buffer);
+ return buffer;
+ });
}
}
-
+
public void testBinaryVariableLengthVsStoredFields() throws Exception {
+ doTestBinaryVariableLengthVsStoredFields(1);
+ }
+
+ public void testSparseBinaryVariableLengthVsStoredFields() throws Exception {
+ doTestBinaryVariableLengthVsStoredFields(random().nextDouble());
+ }
+
+ public void doTestBinaryVariableLengthVsStoredFields(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestBinaryVsStoredFields(0, 10);
+ doTestBinaryVsStoredFields(density, () -> {
+ final int length = random().nextInt(10);
+ byte buffer[] = new byte[length];
+ random().nextBytes(buffer);
+ return buffer;
+ });
}
}
- protected void doTestSortedVsStoredFields(int numDocs, int minLength, int maxLength) throws Exception {
+ protected void doTestSortedVsStoredFields(int numDocs, double density, Supplier<byte[]> bytes) throws Exception {
Directory dir = newFSDirectory(createTempDir("dvduel"));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1456,15 +1530,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// index some docs
for (int i = 0; i < numDocs; i++) {
- idField.setStringValue(Integer.toString(i));
- final int length;
- if (minLength == maxLength) {
- length = minLength; // fixed length
- } else {
- length = TestUtil.nextInt(random(), minLength, maxLength);
+ if (random().nextDouble() > density) {
+ writer.addDocument(new Document());
+ continue;
}
- byte buffer[] = new byte[length];
- random().nextBytes(buffer);
+ idField.setStringValue(Integer.toString(i));
+ byte[] buffer = bytes.get();
storedField.setBytesValue(buffer);
dvField.setBytesValue(buffer);
writer.addDocument(doc);
@@ -1482,28 +1553,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+ docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
- assertEquals(i, docValues.nextDoc());
- assertEquals(binaryValue, docValues.binaryValue());
+ if (binaryValue == null) {
+ assertTrue(docValues.docID() > i);
+ } else {
+ assertEquals(i, docValues.docID());
+ assertEquals(binaryValue, docValues.binaryValue());
+ docValues.nextDoc();
+ }
}
+ assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.forceMerge(1);
// compare again
ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+ docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
- assertEquals(i, docValues.nextDoc());
- assertEquals(binaryValue, docValues.binaryValue());
+ if (binaryValue == null) {
+ assertTrue(docValues.docID() > i);
+ } else {
+ assertEquals(i, docValues.docID());
+ assertEquals(binaryValue, docValues.binaryValue());
+ docValues.nextDoc();
+ }
}
+ assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.close();
@@ -1514,17 +1601,41 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
int fixedLength = TestUtil.nextInt(random(), 1, 10);
- doTestSortedVsStoredFields(atLeast(300), fixedLength, fixedLength);
+ doTestSortedVsStoredFields(atLeast(300), 1, fixedLength, fixedLength);
}
}
- public void testSortedVariableLengthVsStoredFields() throws Exception {
+ public void testSparseSortedFixedLengthVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- doTestSortedVsStoredFields(atLeast(300), 1, 10);
+ int fixedLength = TestUtil.nextInt(random(), 1, 10);
+ doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), fixedLength, fixedLength);
}
}
+ public void testSortedVariableLengthVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestSortedVsStoredFields(atLeast(300), 1, 1, 10);
+ }
+ }
+
+ public void testSparseSortedVariableLengthVsStoredFields() throws Exception {
+ int numIterations = atLeast(1);
+ for (int i = 0; i < numIterations; i++) {
+ doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), 1, 10);
+ }
+ }
+
+ protected void doTestSortedVsStoredFields(int numDocs, double density, int minLength, int maxLength) throws Exception {
+ doTestSortedVsStoredFields(numDocs, density, () -> {
+ int length = TestUtil.nextInt(random(), minLength, maxLength);
+ byte[] buffer = new byte[length];
+ random().nextBytes(buffer);
+ return buffer;
+ });
+ }
+
public void testSortedSetOneValue() throws IOException {
Directory directory = newDirectory();
RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory);
@@ -2001,6 +2112,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@@ -2029,6 +2141,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare again
ir = writer.getReader();
+ TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@@ -2067,18 +2180,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
- new LongProducer() {
- @Override
- long next() {
- return 1;
- }
- },
- new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
- }
- }
+ () -> 1,
+ random()::nextLong
);
}
}
@@ -2087,18 +2190,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
- new LongProducer() {
- @Override
- long next() {
- return random().nextBoolean() ? 0 : 1;
- }
- },
- new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
- }
- }
+ () -> random().nextBoolean() ? 0 : 1,
+ random()::nextLong
);
}
}
@@ -2107,18 +2200,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
- new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), 0, 50);
- }
- },
- new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
- }
- }
+ () -> TestUtil.nextLong(random(), 0, 50),
+ random()::nextLong
);
}
}
@@ -2131,18 +2214,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
- new LongProducer() {
- @Override
- long next() {
- return TestUtil.nextLong(random(), 0, 6);
- }
- },
- new LongProducer() {
- @Override
- long next() {
- return values[random().nextInt(values.length)];
- }
- }
+ () -> TestUtil.nextLong(random(), 0, 6),
+ () -> values[random().nextInt(values.length)]
);
}
}
@@ -2198,22 +2271,31 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
}
public void testGCDCompression() throws Exception {
+ doTestGCDCompression(1);
+ }
+
+ public void testSparseGCDCompression() throws Exception {
+ doTestGCDCompression(random().nextDouble());
+ }
+
+ private void doTestGCDCompression(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
final long min = - (((long) random().nextInt(1 << 30)) << 32);
final long mul = random().nextInt() & 0xFFFFFFFFL;
- final LongProducer longs = new LongProducer() {
- @Override
- long next() {
- return min + mul * random().nextInt(1 << 20);
- }
+ final LongSupplier longs = () -> {
+ return min + mul * random().nextInt(1 << 20);
};
- doTestNumericsVsStoredFields(longs);
+ doTestNumericsVsStoredFields(density, longs);
}
}
public void testZeros() throws Exception {
- doTestNumericsVsStoredFields(0, 0);
+ doTestNumericsVsStoredFields(1, () -> 0);
+ }
+
+ public void testSparseZeros() throws Exception {
+ doTestNumericsVsStoredFields(random().nextDouble(), () -> 0);
}
public void testZeroOrMin() throws Exception {
@@ -2221,13 +2303,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// the GCD of 0 and MIN_VALUE is negative
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
- final LongProducer longs = new LongProducer() {
- @Override
- long next() {
- return random().nextBoolean() ? 0 : Long.MIN_VALUE;
- }
+ final LongSupplier longs = () -> {
+ return random().nextBoolean() ? 0 : Long.MIN_VALUE;
};
- doTestNumericsVsStoredFields(longs);
+ doTestNumericsVsStoredFields(1, longs);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index d56e6cb..7a7abc0 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -362,6 +362,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return target == 0;
+ }
+
+ @Override
public long cost() {
return 1;
}
@@ -415,6 +421,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return target == 0;
+ }
+
+ @Override
public long cost() {
return 1;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
index 64e99da..cd62218 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
+import java.util.function.LongSupplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -59,9 +60,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
});
@@ -73,9 +74,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
});
@@ -86,9 +87,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
}
});
@@ -100,9 +101,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
}
});
@@ -113,9 +114,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
}
});
@@ -127,9 +128,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
}
});
@@ -140,9 +141,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
int thingToDo = r.nextInt(3);
switch (thingToDo) {
case 0: return Long.MIN_VALUE;
@@ -159,9 +160,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
int thingToDo = r.nextInt(3);
switch (thingToDo) {
case 0: return Long.MIN_VALUE;
@@ -177,9 +178,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextBoolean() ? 20 : 3;
}
});
@@ -191,9 +192,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextBoolean() ? 20 : 3;
}
});
@@ -204,9 +205,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextBoolean() ? 1000000L : -5000;
}
});
@@ -218,9 +219,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextBoolean() ? 1000000L : -5000;
}
});
@@ -230,9 +231,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
public void testAllZeros() throws Exception {
int iterations = atLeast(1);
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return 0;
}
});
@@ -243,9 +244,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
assumeTrue("Requires sparse norms support", codecSupportsSparsity());
int iterations = atLeast(1);
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return 0;
}
});
@@ -256,9 +257,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : 0;
}
});
@@ -270,9 +271,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
final Random r = random();
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
}
});
@@ -285,9 +286,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
final Random r = random();
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
}
});
@@ -300,9 +301,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? uncommonValue : commonValue;
}
});
@@ -316,9 +317,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? uncommonValue : commonValue;
}
});
@@ -337,9 +338,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@@ -358,9 +359,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@@ -386,9 +387,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
- doTestNormsVersusDocValues(1, new LongProducer() {
+ doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@@ -417,9 +418,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
- doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+ doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
- long next() {
+ public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@@ -427,7 +428,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
}
- private void doTestNormsVersusDocValues(double density, LongProducer longs) throws Exception {
+ private void doTestNormsVersusDocValues(double density, LongSupplier longs) throws Exception {
int numDocs = atLeast(500);
final FixedBitSet docsWithField = new FixedBitSet(numDocs);
final int numDocsWithField = Math.max(1, (int) (density * numDocs));
@@ -445,7 +446,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
long norms[] = new long[numDocsWithField];
for (int i = 0; i < numDocsWithField; i++) {
- norms[i] = longs.next();
+ norms[i] = longs.getAsLong();
}
Directory dir = newDirectory();
@@ -519,10 +520,6 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
}
- static abstract class LongProducer {
- abstract long next();
- }
-
static class CannedNormSimilarity extends Similarity {
final long norms[];
int index = 0;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java b/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
index 09ad836..0dab34b 100644
--- a/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
@@ -272,11 +272,8 @@ public class DocValuesFacets {
final LongValues ordmap = map == null ? null : map.getGlobalOrds(subIndex);
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
int term;
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
term = si.ordValue();
} else {
term = -1;
@@ -301,10 +298,7 @@ public class DocValuesFacets {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
segCounts[1+si.ordValue()]++;
} else {
segCounts[0]++;
@@ -334,10 +328,7 @@ public class DocValuesFacets {
final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
// strange do-while to collect the missing count (first ord is NO_MORE_ORDS)
int term = (int) si.nextOrd();
do {
@@ -365,10 +356,7 @@ public class DocValuesFacets {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
int term = (int) si.nextOrd();
do {
segCounts[1+term]++;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
index 0c88b80..3db7b4c 100644
--- a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
+++ b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
@@ -293,11 +293,8 @@ class PerSegmentSingleValuedFaceting {
// specialized version when collecting counts for all terms
int doc;
while ((doc = iter.nextDoc()) < DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
int t;
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
t = 1+si.ordValue();
} else {
t = 0;
@@ -309,11 +306,8 @@ class PerSegmentSingleValuedFaceting {
// version that adjusts term numbers because we aren't collecting the full range
int doc;
while ((doc = iter.nextDoc()) < DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > si.docID()) {
- si.advance(doc);
- }
int term;
- if (doc == si.docID()) {
+ if (si.advanceExact(doc)) {
term = si.ordValue();
} else {
term = -1;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 933477b..5ac1975 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -795,7 +795,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
continue;
}
Long val;
- if (ndv.advance(localId) == localId) {
+ if (ndv.advanceExact(localId)) {
val = ndv.longValue();
} else {
continue;
@@ -820,7 +820,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
continue;
}
BytesRef value;
- if (bdv.advance(localId) == localId) {
+ if (bdv.advanceExact(localId)) {
value = BytesRef.deepCopyOf(bdv.binaryValue());
} else {
continue;
@@ -832,7 +832,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
if (sdv == null) {
continue;
}
- if (sdv.advance(localId) == localId) {
+ if (sdv.advanceExact(localId)) {
final BytesRef bRef = sdv.binaryValue();
// Special handling for Boolean fields since they're stored as 'T' and 'F'.
if (schemaField.getType() instanceof BoolField) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
index fb60945..88adf67 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
@@ -186,10 +186,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > singleDv.docID()) {
- singleDv.advance(doc);
- }
- if (doc == singleDv.docID()) {
+ if (singleDv.advanceExact(doc)) {
counts[ singleDv.ordValue() + 1 ]++;
} else {
counts[ 0 ]++;
@@ -211,10 +208,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > multiDv.docID()) {
- multiDv.advance(doc);
- }
- if (doc == multiDv.docID()) {
+ if (multiDv.advanceExact(doc)) {
for(;;) {
int segOrd = (int)multiDv.nextOrd();
if (segOrd < 0) break;
@@ -247,10 +241,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
private void collectDocs(SortedDocValues singleDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > singleDv.docID()) {
- singleDv.advance(doc);
- }
- if (doc == singleDv.docID()) {
+ if (singleDv.advanceExact(doc)) {
int segOrd = singleDv.ordValue();
collect(doc, segOrd, toGlobal);
}
@@ -260,10 +251,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
private void collectCounts(SortedDocValues singleDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > singleDv.docID()) {
- singleDv.advance(doc);
- }
- if (doc == singleDv.docID()) {
+ if (singleDv.advanceExact(doc)) {
int segOrd = singleDv.ordValue();
int ord = (int)toGlobal.get(segOrd);
countAcc.incrementCount(ord, 1);
@@ -274,10 +262,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
private void collectDocs(SortedSetDocValues multiDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > multiDv.docID()) {
- multiDv.advance(doc);
- }
- if (doc == multiDv.docID()) {
+ if (multiDv.advanceExact(doc)) {
for(;;) {
int segOrd = (int)multiDv.nextOrd();
if (segOrd < 0) break;
@@ -290,10 +275,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
private void collectCounts(SortedSetDocValues multiDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- if (doc > multiDv.docID()) {
- multiDv.advance(doc);
- }
- if (doc == multiDv.docID()) {
+ if (multiDv.advanceExact(doc)) {
for(;;) {
int segOrd = (int)multiDv.nextOrd();
if (segOrd < 0) break;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java b/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
index b63e5e9..2224010 100644
--- a/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
+++ b/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
@@ -692,6 +692,12 @@ class FieldCacheImpl implements FieldCache {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docsWithField.get(docID);
+ }
+
+ @Override
public long cost() {
return values.size();
}
@@ -821,6 +827,12 @@ class FieldCacheImpl implements FieldCache {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docToTermOrd.get(docID) != 0;
+ }
+
+ @Override
public long cost() {
return 0;
}
@@ -1022,6 +1034,12 @@ class FieldCacheImpl implements FieldCache {
}
@Override
+ public boolean advanceExact(int target) throws IOException {
+ docID = target;
+ return docsWithField.get(docID);
+ }
+
+ @Override
public long cost() {
return 0;
}
[48/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-1085: Add support
for MoreLikeThis queries and responses in SolrJ client
Posted by kr...@apache.org.
SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2172f3e0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2172f3e0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2172f3e0
Branch: refs/heads/jira/solr-8593
Commit: 2172f3e0081e3e59ce8b02c2bb5654a592f79f74
Parents: 0feca1a
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Thu Oct 27 17:41:25 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Thu Oct 27 17:41:25 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../component/MoreLikeThisComponent.java | 2 +-
.../org/apache/solr/client/solrj/SolrQuery.java | 248 +++++++++++++++++++
.../client/solrj/response/QueryResponse.java | 8 +
.../solr/common/params/MoreLikeThisParams.java | 3 +
.../solr/client/solrj/SolrExampleTests.java | 21 +-
.../apache/solr/client/solrj/SolrQueryTest.java | 25 ++
7 files changed, 299 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ae1d709..4ef1061 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -170,6 +170,9 @@ New Features
* SOLR-9559: Add ExecutorStream to execute stored Streaming Expressions (Joel Bernstein)
+* SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client.
+ (Maurice Jumelet, Bill Mitchell, Cao Manh Dat via shalin)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
index 6ccdd12..7cf6d39 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
@@ -173,7 +173,7 @@ public class MoreLikeThisComponent extends SearchComponent {
&& rb.req.getParams().getBool(COMPONENT_NAME, false)) {
Map<Object,SolrDocumentList> tempResults = new LinkedHashMap<>();
- int mltcount = rb.req.getParams().getInt(MoreLikeThisParams.DOC_COUNT, 5);
+ int mltcount = rb.req.getParams().getInt(MoreLikeThisParams.DOC_COUNT, MoreLikeThisParams.DEFAULT_DOC_COUNT);
String keyName = rb.req.getSchema().getUniqueKeyField().getName();
for (ShardRequest sreq : rb.finished) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
index 7eee7be..e6d3d69 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
@@ -27,6 +27,7 @@ import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.MoreLikeThisParams;
import org.apache.solr.common.params.StatsParams;
import org.apache.solr.common.params.TermsParams;
@@ -801,6 +802,253 @@ public class SolrQuery extends ModifiableSolrParams
return this;
}
+
+ /**
+ * Add field for MoreLikeThis. Automatically
+ * enables MoreLikeThis.
+ *
+ * @param field the names of the field to be added
+ * @return this
+ */
+ public SolrQuery addMoreLikeThisField(String field) {
+ this.setMoreLikeThis(true);
+ return addValueToParam(MoreLikeThisParams.SIMILARITY_FIELDS, field);
+ }
+
+ public SolrQuery setMoreLikeThisFields(String... fields) {
+ if( fields == null || fields.length == 0 ) {
+ this.remove( MoreLikeThisParams.SIMILARITY_FIELDS );
+ this.setMoreLikeThis(false);
+ return this;
+ }
+
+ StringBuilder sb = new StringBuilder();
+ sb.append(fields[0]);
+ for (int i = 1; i < fields.length; i++) {
+ sb.append(',');
+ sb.append(fields[i]);
+ }
+ this.set(MoreLikeThisParams.SIMILARITY_FIELDS, sb.toString());
+ this.setMoreLikeThis(true);
+ return this;
+ }
+
+ /**
+ * @return an array with the fields used to compute similarity.
+ */
+ public String[] getMoreLikeThisFields() {
+ String fl = this.get(MoreLikeThisParams.SIMILARITY_FIELDS);
+ if(fl==null || fl.length()==0) {
+ return null;
+ }
+ return fl.split(",");
+ }
+
+ /**
+ * Sets the frequency below which terms will be ignored in the source doc
+ *
+ * @param mintf the minimum term frequency
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMinTermFreq(int mintf) {
+ this.set(MoreLikeThisParams.MIN_TERM_FREQ, mintf);
+ return this;
+ }
+
+ /**
+ * Gets the frequency below which terms will be ignored in the source doc
+ */
+ public int getMoreLikeThisMinTermFreq() {
+ return this.getInt(MoreLikeThisParams.MIN_TERM_FREQ, 2);
+ }
+
+ /**
+ * Sets the frequency at which words will be ignored which do not occur in
+ * at least this many docs.
+ *
+ * @param mindf the minimum document frequency
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMinDocFreq(int mindf) {
+ this.set(MoreLikeThisParams.MIN_DOC_FREQ, mindf);
+ return this;
+ }
+
+ /**
+ * Gets the frequency at which words will be ignored which do not occur in
+ * at least this many docs.
+ */
+ public int getMoreLikeThisMinDocFreq() {
+ return this.getInt(MoreLikeThisParams.MIN_DOC_FREQ, 5);
+ }
+
+ /**
+ * Sets the minimum word length below which words will be ignored.
+ *
+ * @param minwl the minimum word length
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMinWordLen(int minwl) {
+ this.set(MoreLikeThisParams.MIN_WORD_LEN, minwl);
+ return this;
+ }
+
+ /**
+ * Gets the minimum word length below which words will be ignored.
+ */
+ public int getMoreLikeThisMinWordLen() {
+ return this.getInt(MoreLikeThisParams.MIN_WORD_LEN, 0);
+ }
+
+ /**
+ * Sets the maximum word length above which words will be ignored.
+ *
+ * @param maxwl the maximum word length
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMaxWordLen(int maxwl) {
+ this.set(MoreLikeThisParams.MAX_WORD_LEN, maxwl);
+ return this;
+ }
+
+ /**
+ * Gets the maximum word length above which words will be ignored.
+ */
+ public int getMoreLikeThisMaxWordLen() {
+ return this.getInt(MoreLikeThisParams.MAX_WORD_LEN, 0);
+ }
+
+ /**
+ * Sets the maximum number of query terms that will be included in any
+ * generated query.
+ *
+ * @param maxqt the maximum number of query terms
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMaxQueryTerms(int maxqt) {
+ this.set(MoreLikeThisParams.MAX_QUERY_TERMS, maxqt);
+ return this;
+ }
+
+ /**
+ * Gets the maximum number of query terms that will be included in any
+ * generated query.
+ */
+ public int getMoreLikeThisMaxQueryTerms() {
+ return this.getInt(MoreLikeThisParams.MAX_QUERY_TERMS, 25);
+ }
+
+ /**
+ * Sets the maximum number of tokens to parse in each example doc field
+ * that is not stored with TermVector support.
+ *
+ * @param maxntp the maximum number of tokens to parse
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisMaxTokensParsed(int maxntp) {
+ this.set(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED, maxntp);
+ return this;
+ }
+
+ /**
+ * Gets the maximum number of tokens to parse in each example doc field
+ * that is not stored with TermVector support.
+ */
+ public int getMoreLikeThisMaxTokensParsed() {
+ return this.getInt(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED, 5000);
+ }
+
+ /**
+ * Sets if the query will be boosted by the interesting term relevance.
+ *
+ * @param b set to true to boost the query with the interesting term relevance
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisBoost(boolean b) {
+ this.set(MoreLikeThisParams.BOOST, b);
+ return this;
+ }
+
+ /**
+ * Gets if the query will be boosted by the interesting term relevance.
+ */
+ public boolean getMoreLikeThisBoost() {
+ return this.getBool(MoreLikeThisParams.BOOST, false);
+ }
+
+ /**
+ * Sets the query fields and their boosts using the same format as that
+ * used in DisMaxQParserPlugin. These fields must also be added
+ * using {@link #addMoreLikeThisField(String)}.
+ *
+ * @param qf the query fields
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisQF(String qf) {
+ this.set(MoreLikeThisParams.QF, qf);
+ return this;
+ }
+
+ /**
+ * Gets the query fields and their boosts.
+ */
+ public String getMoreLikeThisQF() {
+ return this.get(MoreLikeThisParams.QF);
+ }
+
+ /**
+ * Sets the number of similar documents to return for each result.
+ *
+ * @param count the number of similar documents to return for each result
+ * @return this
+ */
+ public SolrQuery setMoreLikeThisCount(int count) {
+ this.set(MoreLikeThisParams.DOC_COUNT, count);
+ return this;
+ }
+
+ /**
+ * Gets the number of similar documents to return for each result.
+ */
+ public int getMoreLikeThisCount() {
+ return this.getInt(MoreLikeThisParams.DOC_COUNT, MoreLikeThisParams.DEFAULT_DOC_COUNT);
+ }
+
+ /**
+ * Enable/Disable MoreLikeThis. After enabling MoreLikeThis, the fields
+ * used for computing similarity must be specified calling
+ * {@link #addMoreLikeThisField(String)}.
+ *
+ * @param b flag to indicate if MoreLikeThis should be enabled. if b==false
+ * removes all mlt.* parameters
+ * @return this
+ */
+ public SolrQuery setMoreLikeThis(boolean b) {
+ if(b) {
+ this.set(MoreLikeThisParams.MLT, true);
+ } else {
+ this.remove(MoreLikeThisParams.MLT);
+ this.remove(MoreLikeThisParams.SIMILARITY_FIELDS);
+ this.remove(MoreLikeThisParams.MIN_TERM_FREQ);
+ this.remove(MoreLikeThisParams.MIN_DOC_FREQ);
+ this.remove(MoreLikeThisParams.MIN_WORD_LEN);
+ this.remove(MoreLikeThisParams.MAX_WORD_LEN);
+ this.remove(MoreLikeThisParams.MAX_QUERY_TERMS);
+ this.remove(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED);
+ this.remove(MoreLikeThisParams.BOOST);
+ this.remove(MoreLikeThisParams.QF);
+ this.remove(MoreLikeThisParams.DOC_COUNT);
+ }
+ return this;
+ }
+
+ /**
+ * @return true if MoreLikeThis is enabled, false otherwise
+ */
+ public boolean getMoreLikeThis() {
+ return this.getBool(MoreLikeThisParams.MLT, false);
+ }
+
public SolrQuery setFields(String ... fields) {
if( fields == null || fields.length == 0 ) {
this.remove( CommonParams.FL );
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
index debb079..eb595aa 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
@@ -51,6 +51,7 @@ public class QueryResponse extends SolrResponseBase
private Map<String,NamedList<Object>> _suggestInfo = null;
private NamedList<Object> _statsInfo = null;
private NamedList<NamedList<Number>> _termsInfo = null;
+ private NamedList<SolrDocumentList> _moreLikeThisInfo = null;
private String _cursorMarkNext = null;
// Grouping response
@@ -168,6 +169,9 @@ public class QueryResponse extends SolrResponseBase
_termsInfo = (NamedList<NamedList<Number>>) res.getVal( i );
extractTermsInfo( _termsInfo );
}
+ else if ( "moreLikeThis".equals( n ) ) {
+ _moreLikeThisInfo = (NamedList<SolrDocumentList>) res.getVal( i );
+ }
else if ( CursorMarkParams.CURSOR_MARK_NEXT.equals( n ) ) {
_cursorMarkNext = (String) res.getVal( i );
}
@@ -547,6 +551,10 @@ public class QueryResponse extends SolrResponseBase
public TermsResponse getTermsResponse() {
return _termsResponse;
}
+
+ public NamedList<SolrDocumentList> getMoreLikeThis() {
+ return _moreLikeThisInfo;
+ }
/**
* See also: {@link #getLimitingFacets()}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java b/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
index b41cbfd..c898fdb 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
@@ -50,6 +50,9 @@ public interface MoreLikeThisParams
// Do you want to include the original document in the results or not
public final static String INTERESTING_TERMS = PREFIX + "interestingTerms"; // false,details,(list or true)
+
+ // the default doc count
+ public final static int DEFAULT_DOC_COUNT = 5;
public enum TermStyle {
NONE,
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index 0f91adf..88227ba 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -1996,37 +1996,38 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// test with mlt.fl having comma separated values
SolrQuery q = new SolrQuery("*:*");
q.setRows(20);
- q.setParam("mlt", "true");
- q.setParam("mlt.mintf", "0");
- q.setParam("mlt.count", "2");
- q.setParam("mlt.fl", "x_s,y_s,z_s");
+ q.setMoreLikeThisFields("x_s", "y_s", "z_s");
+ q.setMoreLikeThisMinTermFreq(0);
+ q.setMoreLikeThisCount(2);
QueryResponse response = client.query(q);
assertEquals(20, response.getResults().getNumFound());
- NamedList<Object> moreLikeThis = (NamedList<Object>) response.getResponse().get("moreLikeThis");
+ NamedList<SolrDocumentList> moreLikeThis = response.getMoreLikeThis();
assertNotNull("MoreLikeThis response should not have been null", moreLikeThis);
for (int i=0; i<20; i++) {
String id = "testMoreLikeThis" + i;
- SolrDocumentList mltResp = (SolrDocumentList) moreLikeThis.get(id);
+ SolrDocumentList mltResp = moreLikeThis.get(id);
assertNotNull("MoreLikeThis response for id=" + id + " should not be null", mltResp);
assertTrue("MoreLikeThis response for id=" + id + " had numFound=0", mltResp.getNumFound() > 0);
+ assertTrue("MoreLikeThis response for id=" + id + " had not returned exactly 2 documents", mltResp.size() == 2);
}
// now test with multiple mlt.fl parameters
q = new SolrQuery("*:*");
q.setRows(20);
q.setParam("mlt", "true");
- q.setParam("mlt.mintf", "0");
- q.setParam("mlt.count", "2");
q.setParam("mlt.fl", "x_s", "y_s", "z_s");
+ q.setMoreLikeThisMinTermFreq(0);
+ q.setMoreLikeThisCount(2);
response = client.query(q);
assertEquals(20, response.getResults().getNumFound());
- moreLikeThis = (NamedList<Object>) response.getResponse().get("moreLikeThis");
+ moreLikeThis = response.getMoreLikeThis();
assertNotNull("MoreLikeThis response should not have been null", moreLikeThis);
for (int i=0; i<20; i++) {
String id = "testMoreLikeThis" + i;
- SolrDocumentList mltResp = (SolrDocumentList) moreLikeThis.get(id);
+ SolrDocumentList mltResp = moreLikeThis.get(id);
assertNotNull("MoreLikeThis response for id=" + id + " should not be null", mltResp);
assertTrue("MoreLikeThis response for id=" + id + " had numFound=0", mltResp.getNumFound() > 0);
+ assertTrue("MoreLikeThis response for id=" + id + " had not returned exactly 2 documents", mltResp.size() == 2);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
index 816a2cc..d27847f 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
@@ -431,4 +431,29 @@ public class SolrQueryTest extends LuceneTestCase {
assertNull(solrQuery.getParams("f.field3.facet.interval.set"));
}
+
+ public void testMoreLikeThis() {
+ SolrQuery solrQuery = new SolrQuery();
+ solrQuery.addMoreLikeThisField("mlt1");
+ assertTrue(solrQuery.getMoreLikeThis());
+
+ solrQuery.addMoreLikeThisField("mlt2");
+ solrQuery.addMoreLikeThisField("mlt3");
+ solrQuery.addMoreLikeThisField("mlt4");
+ assertEquals(4, solrQuery.getMoreLikeThisFields().length);
+ solrQuery.setMoreLikeThisFields(null);
+ assertTrue(null == solrQuery.getMoreLikeThisFields());
+ assertFalse(solrQuery.getMoreLikeThis());
+
+ assertEquals(true, solrQuery.setMoreLikeThisBoost(true).getMoreLikeThisBoost());
+ assertEquals("qf", solrQuery.setMoreLikeThisQF("qf").getMoreLikeThisQF());
+ assertEquals(10, solrQuery.setMoreLikeThisMaxTokensParsed(10).getMoreLikeThisMaxTokensParsed());
+ assertEquals(11, solrQuery.setMoreLikeThisMinTermFreq(11).getMoreLikeThisMinTermFreq());
+ assertEquals(12, solrQuery.setMoreLikeThisMinDocFreq(12).getMoreLikeThisMinDocFreq());
+ assertEquals(13, solrQuery.setMoreLikeThisMaxWordLen(13).getMoreLikeThisMaxWordLen());
+ assertEquals(14, solrQuery.setMoreLikeThisMinWordLen(14).getMoreLikeThisMinWordLen());
+ assertEquals(15, solrQuery.setMoreLikeThisMaxQueryTerms(15).getMoreLikeThisMaxQueryTerms());
+ assertEquals(16, solrQuery.setMoreLikeThisCount(16).getMoreLikeThisCount());
+
+ }
}
[46/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-2094:
XPathEntityProcessor should reinitialize the XPathRecordReader instance if
the 'forEach' or 'xpath' attributes are templates & it is not a root entity
Posted by kr...@apache.org.
SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if the 'forEach' or 'xpath' attributes are templates & it is not a root entity
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d6b6e747
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d6b6e747
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d6b6e747
Branch: refs/heads/jira/solr-8593
Commit: d6b6e74703d5f2d29c110d3a7d9491306af9be2c
Parents: d9c4846
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 27 14:52:24 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 27 14:52:24 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../dataimport/XPathEntityProcessor.java | 12 +-
.../dataimport/MockStringDataSource.java | 54 ++++++++
.../solr/handler/dataimport/TestDocBuilder.java | 129 ++++++++++++++++---
4 files changed, 181 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7d9a4fa..ae1d709 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -234,6 +234,9 @@ Bug Fixes
(Cao Manh Dat, Lance Norskog, Webster Homer, hossman, yonik)
* SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
+
+* SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if
+ the 'forEach' or 'xpath' attributes are templates & it is not a root entity (Cao Manh Dat, noble)
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index 637e1ef..cc28dc4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -85,12 +85,14 @@ public class XPathEntityProcessor extends EntityProcessorBase {
protected int blockingQueueSize = 1000;
protected Thread publisherThread;
+
+ protected boolean reinitXPathReader = true;
@Override
@SuppressWarnings("unchecked")
public void init(Context context) {
super.init(context);
- if (xpathReader == null)
+ if (reinitXPathReader)
initXpathReader(context.getVariableResolver());
pk = context.getEntityAttribute("pk");
dataSource = context.getDataSource();
@@ -99,6 +101,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
}
private void initXpathReader(VariableResolver resolver) {
+ reinitXPathReader = false;
useSolrAddXml = Boolean.parseBoolean(context
.getEntityAttribute(USE_SOLR_ADD_SCHEMA));
streamRows = Boolean.parseBoolean(context
@@ -147,11 +150,12 @@ public class XPathEntityProcessor extends EntityProcessorBase {
xpathReader.addField("name", "/add/doc/field/@name", true);
xpathReader.addField("value", "/add/doc/field", true);
} else {
- String forEachXpath = context.getEntityAttribute(FOR_EACH);
+ String forEachXpath = context.getResolvedEntityAttribute(FOR_EACH);
if (forEachXpath == null)
throw new DataImportHandlerException(SEVERE,
"Entity : " + context.getEntityAttribute("name")
+ " must have a 'forEach' attribute");
+ if (forEachXpath.equals(context.getEntityAttribute(FOR_EACH))) reinitXPathReader = true;
try {
xpathReader = new XPathRecordReader(forEachXpath);
@@ -164,6 +168,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
}
String xpath = field.get(XPATH);
xpath = context.replaceTokens(xpath);
+ //!xpath.equals(field.get(XPATH)) means the field xpath has a template
+ //in that case ensure that the XPathRecordReader is reinitialized
+ //for each xml
+ if (!xpath.equals(field.get(XPATH)) && !context.isRootEntity()) reinitXPathReader = true;
xpathReader.addField(field.get(DataImporter.COLUMN),
xpath,
Boolean.parseBoolean(field.get(DataImporter.MULTI_VALUED)),
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
new file mode 100644
index 0000000..7c9a6d1
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.dataimport;
+
+
+import java.io.Reader;
+import java.io.StringReader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+public class MockStringDataSource extends DataSource<Reader> {
+
+ private static Map<String, String> cache = new HashMap<>();
+
+ public static void setData(String query,
+ String data) {
+ cache.put(query, data);
+ }
+
+ public static void clearCache() {
+ cache.clear();
+ }
+ @Override
+ public void init(Context context, Properties initProps) {
+
+ }
+
+ @Override
+ public Reader getData(String query) {
+ return new StringReader(cache.get(query));
+ }
+
+ @Override
+ public void close() {
+ cache.clear();
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
index 527dad0..39dd891 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
@@ -39,9 +39,10 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
@After
public void tearDown() throws Exception {
MockDataSource.clearCache();
+ MockStringDataSource.clearCache();
super.tearDown();
}
-
+
@Test
public void loadClass() throws Exception {
@SuppressWarnings("unchecked")
@@ -180,6 +181,52 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
assertEquals(3, di.getDocBuilder().importStatistics.rowsCount.get());
}
+ @Test
+ public void templateXPath() {
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_variableXpath);
+ DIHConfiguration cfg = di.getConfig();
+
+ RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null);
+ List<Map<String, Object>> l = new ArrayList<>();
+ l.add(createMap("id", 1, "name", "iphone", "manufacturer", "Apple"));
+ l.add(createMap("id", 2, "name", "ipad", "manufacturer", "Apple"));
+ l.add(createMap("id", 3, "name", "pixel", "manufacturer", "Google"));
+
+ MockDataSource.setIterator("select * from x", l.iterator());
+
+ List<Map<String,Object>> nestedData = new ArrayList<>();
+ nestedData.add(createMap("founded", "Cupertino, California, U.S", "year", "1976", "year2", "1976"));
+ nestedData.add(createMap("founded", "Cupertino, California, U.S", "year", "1976", "year2", "1976"));
+ nestedData.add(createMap("founded", "Menlo Park, California, U.S", "year", "1998", "year2", "1998"));
+
+ MockStringDataSource.setData("companies.xml", xml_attrVariableXpath);
+ MockStringDataSource.setData("companies2.xml", xml_variableXpath);
+ MockStringDataSource.setData("companies3.xml", xml_variableForEach);
+
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.TRUE, swi.deleteAllCalled);
+ assertEquals(Boolean.TRUE, swi.commitCalled);
+ assertEquals(Boolean.TRUE, swi.finishCalled);
+ assertEquals(3, swi.docs.size());
+ for (int i = 0; i < l.size(); i++) {
+ SolrInputDocument doc = swi.docs.get(i);
+
+ Map<String, Object> map = l.get(i);
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
+ }
+
+ map = nestedData.get(i);
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
+ }
+ }
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(3, di.getDocBuilder().importStatistics.docCount.get());
+ }
+
static class SolrWriterImpl extends SolrWriter {
List<SolrInputDocument> docs = new ArrayList<>();
@@ -215,21 +262,73 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
}
public static final String dc_singleEntity = "<dataConfig>\n"
- + "<dataSource type=\"MockDataSource\"/>\n"
- + " <document name=\"X\" >\n"
- + " <entity name=\"x\" query=\"select * from x\">\n"
- + " <field column=\"id\"/>\n"
- + " <field column=\"desc\"/>\n"
- + " <field column=\"desc\" name=\"desc_s\" />" + " </entity>\n"
- + " </document>\n" + "</dataConfig>";
+ + "<dataSource type=\"MockDataSource\"/>\n"
+ + " <document name=\"X\" >\n"
+ + " <entity name=\"x\" query=\"select * from x\">\n"
+ + " <field column=\"id\"/>\n"
+ + " <field column=\"desc\"/>\n"
+ + " <field column=\"desc\" name=\"desc_s\" />" + " </entity>\n"
+ + " </document>\n" + "</dataConfig>";
public static final String dc_deltaConfig = "<dataConfig>\n"
- + "<dataSource type=\"MockDataSource\"/>\n"
- + " <document name=\"X\" >\n"
- + " <entity name=\"x\" query=\"select * from x\" deltaQuery=\"select id from x\">\n"
- + " <field column=\"id\"/>\n"
- + " <field column=\"desc\"/>\n"
- + " <field column=\"desc\" name=\"desc_s\" />" + " </entity>\n"
- + " </document>\n" + "</dataConfig>";
+ + "<dataSource type=\"MockDataSource\"/>\n"
+ + " <document name=\"X\" >\n"
+ + " <entity name=\"x\" query=\"select * from x\" deltaQuery=\"select id from x\">\n"
+ + " <field column=\"id\"/>\n"
+ + " <field column=\"desc\"/>\n"
+ + " <field column=\"desc\" name=\"desc_s\" />" + " </entity>\n"
+ + " </document>\n" + "</dataConfig>";
+
+ public static final String dc_variableXpath = "<dataConfig>\n"
+ + "<dataSource type=\"MockDataSource\"/>\n"
+ + "<dataSource name=\"xml\" type=\"MockStringDataSource\"/>\n"
+ + " <document name=\"X\" >\n"
+ + " <entity name=\"x\" query=\"select * from x\">\n"
+ + " <field column=\"id\"/>\n"
+ + " <field column=\"name\"/>\n"
+ + " <field column=\"manufacturer\"/>"
+ + " <entity name=\"c1\" url=\"companies.xml\" dataSource=\"xml\" forEach=\"/companies/company\" processor=\"XPathEntityProcessor\">"
+ + " <field column=\"year\" xpath=\"/companies/company/year[@name='p_${x.manufacturer}_s']\" />"
+ + " </entity>"
+ + " <entity name=\"c2\" url=\"companies2.xml\" dataSource=\"xml\" forEach=\"/companies/company\" processor=\"XPathEntityProcessor\">"
+ + " <field column=\"founded\" xpath=\"/companies/company/p_${x.manufacturer}_s/founded\" />"
+ + " </entity>"
+ + " <entity name=\"c3\" url=\"companies3.xml\" dataSource=\"xml\" forEach=\"/companies/${x.manufacturer}\" processor=\"XPathEntityProcessor\">"
+ + " <field column=\"year2\" xpath=\"/companies/${x.manufacturer}/year\" />"
+ + " </entity>"
+ + " </entity>\n"
+ + " </document>\n" + "</dataConfig>";
+
+
+ public static final String xml_variableForEach = "<companies>\n" +
+ "\t<Apple>\n" +
+ "\t\t<year>1976</year>\n" +
+ "\t</Apple>\n" +
+ "\t<Google>\n" +
+ "\t\t<year>1998</year>\n" +
+ "\t</Google>\n" +
+ "</companies>";
+
+ public static final String xml_variableXpath = "<companies>\n" +
+ "\t<company>\n" +
+ "\t\t<p_Apple_s>\n" +
+ "\t\t\t<founded>Cupertino, California, U.S</founded>\n" +
+ "\t\t</p_Apple_s>\t\t\n" +
+ "\t</company>\n" +
+ "\t<company>\n" +
+ "\t\t<p_Google_s>\n" +
+ "\t\t\t<founded>Menlo Park, California, U.S</founded>\n" +
+ "\t\t</p_Google_s>\n" +
+ "\t</company>\n" +
+ "</companies>";
+
+ public static final String xml_attrVariableXpath = "<companies>\n" +
+ "\t<company>\n" +
+ "\t\t<year name='p_Apple_s'>1976</year>\n" +
+ "\t</company>\n" +
+ "\t<company>\n" +
+ "\t\t<year name='p_Google_s'>1998</year>\t\t\n" +
+ "\t</company>\n" +
+ "</companies>";
}
[10/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9634: correct name
of deprecated/removed method in solr/CHANGES.txt
Posted by kr...@apache.org.
SOLR-9634: correct name of deprecated/removed method in solr/CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/37871de2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/37871de2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/37871de2
Branch: refs/heads/jira/solr-8593
Commit: 37871de29bc5bd329eeb2f6867f3f8ca3b96e84f
Parents: 97339e2
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 24 18:58:26 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 24 18:58:26 2016 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37871de2/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e223b4d..3bb28c4 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -98,7 +98,7 @@ Upgrade Notes
* The create/deleteCollection methods on MiniSolrCloudCluster have been
deprecated. Clients should instead use the CollectionAdminRequest API. In
- addition, MiniSolrCloudCluster#uploadConfigSet(File, String) has been
+ addition, MiniSolrCloudCluster#uploadConfigDir(File, String) has been
deprecated in favour of #uploadConfigSet(Path, String)
* The bin/solr.in.sh (bin/solr.in.cmd on Windows) is now completely commented by default. Previously, this wasn't so,
[27/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-2212: Add a factory
class corresponding to Lucene's NoMergePolicy
Posted by kr...@apache.org.
SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/768c7e26
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/768c7e26
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/768c7e26
Branch: refs/heads/jira/solr-8593
Commit: 768c7e2648557d10f231f49a7c76eb040cbbcb0e
Parents: b8d9647
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Wed Oct 26 11:28:53 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Wed Oct 26 11:28:53 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../apache/solr/index/NoMergePolicyFactory.java | 34 ++++++++++++++++++++
.../conf/solrconfig-nomergepolicyfactory.xml | 32 ++++++++++++++++++
.../apache/solr/core/TestMergePolicyConfig.java | 20 ++++++++++++
4 files changed, 88 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/768c7e26/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 9dfed73..ba680a1 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -164,6 +164,8 @@ New Features
SOLR_HOME on every node. Editing config through API is supported but affects only that one node.
(janhoy)
+* SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy. (Lance Norskog, Cao Manh Dat via shalin)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/768c7e26/solr/core/src/java/org/apache/solr/index/NoMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/NoMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/NoMergePolicyFactory.java
new file mode 100644
index 0000000..66fa18e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/NoMergePolicyFactory.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.index;
+
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.IndexSchema;
+
+public class NoMergePolicyFactory extends SimpleMergePolicyFactory {
+ public NoMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+ super(resourceLoader, args, schema);
+ }
+
+ @Override
+ protected MergePolicy getMergePolicyInstance() {
+ return NoMergePolicy.INSTANCE;
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/768c7e26/solr/core/src/test-files/solr/collection1/conf/solrconfig-nomergepolicyfactory.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-nomergepolicyfactory.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-nomergepolicyfactory.xml
new file mode 100644
index 0000000..a9e3801
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-nomergepolicyfactory.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<config>
+ <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+ <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+ <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+ <indexConfig>
+ <useCompoundFile>${useCompoundFile:false}</useCompoundFile>
+ <mergePolicyFactory class="org.apache.solr.index.NoMergePolicyFactory" />
+ </indexConfig>
+
+ <requestHandler name="standard" class="solr.StandardRequestHandler"></requestHandler>
+
+</config>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/768c7e26/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
index fd13a8e..f8e232a 100644
--- a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.solr.SolrTestCaseJ4;
@@ -128,6 +129,25 @@ public class TestMergePolicyConfig extends SolrTestCaseJ4 {
assertCompoundSegments(h.getCore(), false);
}
+ public void testNoMergePolicyFactoryConfig() throws Exception {
+ initCore("solrconfig-nomergepolicyfactory.xml","schema-minimal.xml");
+ IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
+ NoMergePolicy mergePolicy = assertAndCast(NoMergePolicy.class,
+ iwc.getMergePolicy());
+
+ assertCommitSomeNewDocs();
+
+ assertCommitSomeNewDocs();
+ assertNumSegments(h.getCore(), 2);
+
+ assertU(optimize());
+ assertNumSegments(h.getCore(), 2);
+ deleteCore();
+ initCore("solrconfig-nomergepolicyfactory.xml","schema-minimal.xml");
+ iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
+ assertEquals(mergePolicy, iwc.getMergePolicy());
+ }
+
public void testLogMergePolicyConfig() throws Exception {
final Class<? extends LogMergePolicy> mpClass = random().nextBoolean()
[40/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9654: tests: fix
overrequest test by fixing doc->shard mapping
Posted by kr...@apache.org.
SOLR-9654: tests: fix overrequest test by fixing doc->shard mapping
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/53507b4e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/53507b4e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/53507b4e
Branch: refs/heads/jira/solr-8593
Commit: 53507b4e79573af3fdc64207b9e47d62f7c99028
Parents: e448360
Author: yonik <yo...@apache.org>
Authored: Wed Oct 26 18:35:09 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Wed Oct 26 18:35:09 2016 -0400
----------------------------------------------------------------------
.../solr/search/facet/TestJsonFacets.java | 94 ++++++++++++--------
.../java/org/apache/solr/SolrTestCaseHS.java | 1 +
2 files changed, 59 insertions(+), 36 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/53507b4e/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index c8ed0e6..5527a3d 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -27,6 +27,7 @@ import java.util.Random;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import com.tdunning.math.stats.AVLTreeDigest;
+import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrException;
import org.apache.solr.util.hll.HLL;
import org.apache.lucene.util.LuceneTestCase;
@@ -1146,47 +1147,68 @@ public class TestJsonFacets extends SolrTestCaseHS {
"} "
);
+ client.testJQ(params(p, "q", "*:*"
+ , "json.facet", "{" +
+ // "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" + // overrequest=0 test needs predictable layout
+ "cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "}"
+ )
+ , "facets=={ count:6" +
+ // ", cat0:{ buckets:[ {val:B,count:3} ] }"
+ ", cat1:{ buckets:[ {val:B,count:3} ] }" +
+ ", catDef:{ buckets:[ {val:B,count:3} ] }" +
+ ", catBig:{ buckets:[ {val:A,count:2} ] }" +
+ "}"
+ );
-/*
- if (!client.local()) {
- client.testJQ(params(p, "q", "*:*"
- , "json.facet", "{" +
- "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
- ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
- ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
- ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
- "}"
- )
- , "facets=={ count:6" +
- ", cat0:{ buckets:[ {val:A,count:2} ] }" + // with no overrequest, we incorrectly conclude that A is the top bucket
- ", cat1:{ buckets:[ {val:B,count:3} ] }" +
- ", catDef:{ buckets:[ {val:B,count:3} ] }" +
- ", catBig:{ buckets:[ {val:A,count:2} ] }" +
- "}"
- );
- } else {
- // In non-distrib mode, should still be able to specify overrequest, but it shouldn't matter.
- client.testJQ(params(p, "q", "*:*"
- , "json.facet", "{" +
- "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
- ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
- ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
- ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
- "}"
- )
- , "facets=={ count:6" +
- ", cat0:{ buckets:[ {val:B,count:3} ] }" + // only change from distrib-mode test above
- ", cat1:{ buckets:[ {val:B,count:3} ] }" +
- ", catDef:{ buckets:[ {val:B,count:3} ] }" +
- ", catBig:{ buckets:[ {val:A,count:2} ] }" +
- "}"
- );
- }
-*/
}
+ @Test
+ public void testOverrequest() throws Exception {
+ initServers();
+ Client client = servers.getClient(random().nextInt());
+ client.queryDefaults().set( "shards", servers.getShards(), "debugQuery", Boolean.toString(random().nextBoolean()) );
+
+ List<SolrClient> clients = client.getClientProvider().all();
+ assertTrue(clients.size() >= 3);
+
+ client.deleteByQuery("*:*", null);
+
+ ModifiableSolrParams p = params("cat_s", "cat_s");
+ String cat_s = p.get("cat_s");
+
+ clients.get(0).add( sdoc("id", "1", cat_s, "A") ); // A will win tiebreak
+ clients.get(0).add( sdoc("id", "2", cat_s, "B") );
+
+ clients.get(1).add( sdoc("id", "3", cat_s, "B") );
+ clients.get(1).add( sdoc("id", "4", cat_s, "A") ); // A will win tiebreak
+ clients.get(2).add( sdoc("id", "5", cat_s, "B") );
+ clients.get(2).add( sdoc("id", "6", cat_s, "B") );
+
+ client.commit();
+
+ // Shard responses should be A=1, A=1, B=2, merged should be "A=2, B=2" hence A wins tiebreak
+
+ client.testJQ(params(p, "q", "*:*",
+ "json.facet", "{" +
+ "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
+ ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "}"
+ )
+ , "facets=={ count:6" +
+ ", cat0:{ buckets:[ {val:A,count:2} ] }" + // with no overrequest, we incorrectly conclude that A is the top bucket
+ ", cat1:{ buckets:[ {val:B,count:4} ] }" +
+ ", catDef:{ buckets:[ {val:B,count:4} ] }" +
+ ", catBig:{ buckets:[ {val:A,count:2} ] }" +
+ "}"
+ );
+ }
@Test
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/53507b4e/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
index 118c194..aba2603 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
@@ -285,6 +285,7 @@ public class SolrTestCaseHS extends SolrTestCaseJ4 {
public boolean local() {
return provider == null;
}
+ public ClientProvider getClientProvider() { return provider; }
public void testJQ(SolrParams args, String... tests) throws Exception {
if (queryDefaults != null) {
[38/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9481: Fix precommit
test, unused import
Posted by kr...@apache.org.
SOLR-9481: Fix precommit test, unused import
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3d21029b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3d21029b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3d21029b
Branch: refs/heads/jira/solr-8593
Commit: 3d21029b334a498d59799b167e5278acc6013636
Parents: 7794fbd
Author: Jan Høydahl <ja...@apache.org>
Authored: Wed Oct 26 23:16:22 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Wed Oct 26 23:16:22 2016 +0200
----------------------------------------------------------------------
.../org/apache/solr/handler/admin/SecurityConfHandlerLocal.java | 1 -
1 file changed, 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3d21029b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
index 985a070..34a635f 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
@@ -29,7 +29,6 @@ import java.util.Collections;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.CommandOperation;
import org.slf4j.Logger;
[45/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9692: blockUnknown
property makes inter-node communication impossible
Posted by kr...@apache.org.
SOLR-9692: blockUnknown property makes inter-node communication impossible
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d9c4846e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d9c4846e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d9c4846e
Branch: refs/heads/jira/solr-8593
Commit: d9c4846ee17c18114843e01a6319b31033a4d3a9
Parents: 7b3d29d
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 27 14:35:18 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 27 14:35:18 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d9c4846e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0c3ec3a..7d9a4fa 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -232,6 +232,8 @@ Bug Fixes
* SOLR-4164: group.limit=-1 was not supported for grouping in distributed mode.
(Cao Manh Dat, Lance Norskog, Webster Homer, hossman, yonik)
+
+* SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
Optimizations
----------------------
@@ -396,7 +398,7 @@ Bug Fixes
* SOLR-9389: HDFS Transaction logs stay open for writes which leaks Xceivers. (Tim Owen via Mark Miller)
-* SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
+* SOLR-9188: blockUnknown property makes inter-node communication impossible (noble)
* SOLR-9455: Deleting a sub-shard in recovery state can mark parent shard as inactive. (shalin)
[44/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9533,
SOLR-9559: Update CHANGES.txt
Posted by kr...@apache.org.
SOLR-9533, SOLR-9559: Update CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7b3d29dd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7b3d29dd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7b3d29dd
Branch: refs/heads/jira/solr-8593
Commit: 7b3d29dda77404b9d2772c0df4bc2fd4d600ed5e
Parents: d215f9e
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 26 23:11:20 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 26 23:11:20 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7b3d29dd/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5d0af5c..0c3ec3a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -166,7 +166,9 @@ New Features
* SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy. (Lance Norskog, Cao Manh Dat via shalin)
-* SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd (janhoy)
+* SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd (janhoy)
+
+* SOLR-9559: Add ExecutorStream to execute stored Streaming Expressions (Joel Bernstein)
Bug Fixes
----------------------
@@ -370,6 +372,8 @@ Other Changes
so that killing a shard in one collection does not result in leader election in a different collection.
See SOLR-5243 for the related bug. (Cao Manh Dat via shalin)
+* SOLR-9533: Reload core config when a core is reloaded (Gethin James, Joel Bernstein)
+
================== 6.2.1 ==================
Bug Fixes
[09/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7462: Fix
LegacySortedSetDocValuesWrapper to reset `upTo` when calling `advanceExact`.
Posted by kr...@apache.org.
LUCENE-7462: Fix LegacySortedSetDocValuesWrapper to reset `upTo` when calling `advanceExact`.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/97339e2c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/97339e2c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/97339e2c
Branch: refs/heads/jira/solr-8593
Commit: 97339e2cacc308c3689d1cd16dfbc44ebea60788
Parents: e1b0693
Author: Adrien Grand <jp...@gmail.com>
Authored: Mon Oct 24 15:43:21 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 24 15:44:40 2016 +0200
----------------------------------------------------------------------
.../apache/lucene/index/LegacySortedNumericDocValuesWrapper.java | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97339e2c/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
index cfb61e3..a75274e 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
@@ -77,6 +77,7 @@ public final class LegacySortedNumericDocValuesWrapper extends SortedNumericDocV
public boolean advanceExact(int target) throws IOException {
docID = target;
values.setDocument(docID);
+ upto = 0;
return values.count() != 0;
}
[11/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9654: add
overrequest param to JSON Facet API
Posted by kr...@apache.org.
SOLR-9654: add overrequest param to JSON Facet API
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4a851637
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4a851637
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4a851637
Branch: refs/heads/jira/solr-8593
Commit: 4a85163754e16b466cb4ef3dd0de92fe7d5b87d1
Parents: 37871de
Author: yonik <yo...@apache.org>
Authored: Mon Oct 24 14:23:12 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Mon Oct 24 14:23:12 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 3 ++
.../apache/solr/search/facet/FacetField.java | 1 +
.../solr/search/facet/FacetFieldMerger.java | 8 ++---
.../solr/search/facet/FacetFieldProcessor.java | 22 +++++++++---
.../apache/solr/search/facet/FacetRequest.java | 1 +
.../solr/search/facet/TestJsonFacets.java | 37 ++++++++++++++++++++
6 files changed, 63 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3bb28c4..4355b80 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -157,6 +157,9 @@ New Features
* SOLR-9662: New parameter -u <user:pass> in bin/post to pass basicauth credentials (janhoy)
+* SOLR-9654: Add "overrequest" parameter to JSON Facet API to control amount of overrequest
+ on a distributed terms facet. (yonik)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
index 3f8cb0b..c2cf0c2 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
@@ -29,6 +29,7 @@ import org.apache.solr.schema.SchemaField;
abstract class FacetRequestSorted extends FacetRequest {
long offset;
long limit;
+ int overrequest = -1; // Number of buckets to request beyond the limit to do internally during distributed search. -1 means default.
long mincount;
String sortVariable;
SortDirection sortDirection;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
index 432e1a7..9f99919 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
@@ -110,11 +110,11 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
sortBuckets();
- int first = (int)freq.offset;
- int end = freq.limit >=0 ? first + (int) freq.limit : Integer.MAX_VALUE;
- int last = Math.min(sortedBuckets.size(), end);
+ long first = freq.offset;
+ long end = freq.limit >=0 ? first + (int) freq.limit : Integer.MAX_VALUE;
+ long last = Math.min(sortedBuckets.size(), end);
- List<SimpleOrderedMap> resultBuckets = new ArrayList<>(Math.max(0, (last - first)));
+ List<SimpleOrderedMap> resultBuckets = new ArrayList<>(Math.max(0, (int)(last - first)));
/** this only works if there are no filters (like mincount)
for (int i=first; i<last; i++) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index 3c1a40c..bbc782c 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -212,12 +212,24 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
}
final int off = fcontext.isShard() ? 0 : (int) freq.offset;
- // add a modest amount of over-request if this is a shard request
- final int lim = freq.limit >= 0 ? (fcontext.isShard() ? (int)(freq.limit*1.1+4) : (int)freq.limit) : Integer.MAX_VALUE;
+
+ long effectiveLimit = Integer.MAX_VALUE; // use max-int instead of max-long to avoid overflow
+ if (freq.limit >= 0) {
+ effectiveLimit = freq.limit;
+ if (fcontext.isShard()) {
+ // add over-request if this is a shard request
+ if (freq.overrequest == -1) {
+ effectiveLimit = (long) (effectiveLimit*1.1+4); // default: add 10% plus 4 (to overrequest for very small limits)
+ } else {
+ effectiveLimit += freq.overrequest;
+ }
+ }
+ }
+
final int sortMul = freq.sortDirection.getMultiplier();
- int maxTopVals = (int) (lim >= 0 ? (long) off + lim : Integer.MAX_VALUE - 1);
+ int maxTopVals = (int) (effectiveLimit >= 0 ? Math.min(off + effectiveLimit, Integer.MAX_VALUE - 1) : Integer.MAX_VALUE - 1);
maxTopVals = Math.min(maxTopVals, slotCardinality);
final SlotAcc sortAcc = this.sortAcc, indexOrderAcc = this.indexOrderAcc;
final BiPredicate<Slot,Slot> orderPredicate;
@@ -258,7 +270,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
bottom.slot = slotNum;
bottom = queue.updateTop();
}
- } else if (lim > 0) {
+ } else if (effectiveLimit > 0) {
// queue not full
Slot s = new Slot();
s.slot = slotNum;
@@ -304,7 +316,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
// if we are deep paging, we don't have to order the highest "offset" counts.
int collectCount = Math.max(0, queue.size() - off);
- assert collectCount <= lim;
+ assert collectCount <= effectiveLimit;
int[] sortedSlots = new int[collectCount];
for (int i = collectCount - 1; i >= 0; i--) {
sortedSlots[i] = queue.pop().slot;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 76d7d2a..40ca686 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -586,6 +586,7 @@ class FacetFieldParser extends FacetParser<FacetField> {
facet.field = getField(m);
facet.offset = getLong(m, "offset", facet.offset);
facet.limit = getLong(m, "limit", facet.limit);
+ facet.overrequest = (int) getLong(m, "overrequest", facet.overrequest);
+ if (facet.limit == 0) facet.offset = 0; // normalize. an offset with a limit of zero isn't useful.
facet.mincount = getLong(m, "mincount", facet.mincount);
facet.missing = getBoolean(m, "missing", facet.missing);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a851637/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index c83d308..0ec0be4 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1147,6 +1147,43 @@ public class TestJsonFacets extends SolrTestCaseHS {
);
+
+ if (!client.local()) {
+ client.testJQ(params(p, "q", "*:*"
+ , "json.facet", "{" +
+ "cat0:{type:terms, field:${cat_s}, limit:1, overrequest:0}" +
+ ",cat1:{type:terms, field:${cat_s}, limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "}"
+ )
+ , "facets=={ count:6" +
+ ", cat0:{ buckets:[ {val:A,count:2} ] }" + // with no overrequest, we incorrectly conclude that A is the top bucket
+ ", cat1:{ buckets:[ {val:B,count:3} ] }" +
+ ", catDef:{ buckets:[ {val:B,count:3} ] }" +
+ ", catBig:{ buckets:[ {val:A,count:2} ] }" +
+ "}"
+ );
+ } else {
+ // In non-distrib mode, should still be able to specify overrequest, but it shouldn't matter.
+ client.testJQ(params(p, "q", "*:*"
+ , "json.facet", "{" +
+ "cat0:{type:terms, field:${cat_s}, limit:1, overrequest:0}" +
+ ",cat1:{type:terms, field:${cat_s}, limit:1, overrequest:1}" +
+ ",catDef:{type:terms, field:${cat_s}, limit:1, overrequest:-1}" + // -1 is default overrequest
+ ",catBig:{type:terms, field:${cat_s}, offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
+ "}"
+ )
+ , "facets=={ count:6" +
+ ", cat0:{ buckets:[ {val:B,count:3} ] }" + // only change from distrib-mode test above
+ ", cat1:{ buckets:[ {val:B,count:3} ] }" +
+ ", catDef:{ buckets:[ {val:B,count:3} ] }" +
+ ", catBig:{ buckets:[ {val:A,count:2} ] }" +
+ "}"
+ );
+ }
+
+
}
[04/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9506: cache
IndexFingerprint for each segment
Posted by kr...@apache.org.
SOLR-9506: cache IndexFingerprint for each segment
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/184b0f22
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/184b0f22
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/184b0f22
Branch: refs/heads/jira/solr-8593
Commit: 184b0f221559eaed5f273b1907e8af07bc95fec9
Parents: 9b49c72
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 16:45:42 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 16:45:42 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../src/java/org/apache/solr/core/SolrCore.java | 40 +++++++
.../apache/solr/search/SolrIndexSearcher.java | 61 ++++++++---
.../apache/solr/update/IndexFingerprint.java | 78 ++++++++------
.../cloud/LeaderFailureAfterFreshStartTest.java | 32 ------
.../solr/cloud/PeerSyncReplicationTest.java | 2 +-
.../org/apache/solr/update/PeerSyncTest.java | 4 +-
...PeerSyncWithIndexFingerprintCachingTest.java | 108 +++++++++++++++++++
.../solr/cloud/AbstractDistribZkTestBase.java | 31 ++++++
9 files changed, 279 insertions(+), 80 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 92a994f..f455002 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -229,6 +229,9 @@ Optimizations
* SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams (Pushkar Raste, noble)
+* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
+
+
Other Changes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 7ba15af..a2dc1c4 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -52,12 +52,14 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
+import com.google.common.collect.MapMaker;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@@ -127,6 +129,7 @@ import org.apache.solr.search.stats.LocalStatsCache;
import org.apache.solr.search.stats.StatsCache;
import org.apache.solr.update.DefaultSolrCoreState;
import org.apache.solr.update.DirectUpdateHandler2;
+import org.apache.solr.update.IndexFingerprint;
import org.apache.solr.update.SolrCoreState;
import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
import org.apache.solr.update.SolrIndexWriter;
@@ -201,6 +204,8 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
public Date getStartTimeStamp() { return startTime; }
+ private final Map<Object, IndexFingerprint> perSegmentFingerprintCache = new MapMaker().weakKeys().makeMap();
+
public long getStartNanoTime() {
return startNanoTime;
}
@@ -1589,6 +1594,41 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
}
/**
+ * Computes the fingerprint of a segment and caches it only if all the versions in the segment are included in the fingerprint.
+ * We can't use computeIfAbsent as caching is conditional (as described above).
+ * There is a chance that two threads may compute the fingerprint on the same segment. It might be OK to do so rather than locking the entire map.
+ *
+ * @param searcher searcher that includes the specified LeafReaderContext
+ * @param ctx LeafReaderContext of a segment to compute fingerprint of
+ * @param maxVersion maximum version number to consider for fingerprint computation
+ * @return IndexFingerprint of the segment
+ * @throws IOException Can throw IOException
+ */
+ public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
+ throws IOException {
+ IndexFingerprint f = null;
+ f = perSegmentFingerprintCache.get(ctx.reader().getCoreCacheKey());
+ // fingerprint is either not cached or
+ // if we want fingerprint only up to a version less than maxVersionEncountered in the segment, or
+ // documents were deleted from segment for which fingerprint was cached
+ //
+ if (f == null || (f.getMaxInHash() > maxVersion) || (f.getNumDocs() != ctx.reader().numDocs())) {
+ log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+ f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
+ // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint
+ if (f.getMaxVersionEncountered() == f.getMaxInHash()) {
+ log.info("Caching fingerprint for searcher:{} leafReaderContext:{} mavVersion:{}", searcher, ctx, maxVersion);
+ perSegmentFingerprintCache.put(ctx.reader().getCoreCacheKey(), f);
+ }
+
+ } else {
+ log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+ }
+ log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
+ return f;
+ }
+
+ /**
* Returns the current registered searcher with its reference count incremented, or null if none are registered.
*/
public RefCounted<SolrIndexSearcher> getRegisteredSearcher() {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 933477b..d9364ca 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -33,7 +33,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
+
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
@@ -42,22 +42,50 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.LazyDocument;
import org.apache.lucene.index.*;
-import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.EarlyTerminatingSortingCollector;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermStatistics;
+import org.apache.lucene.search.TimeLimitingCollector;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.solr.common.SolrDocumentBase;
-import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
@@ -152,8 +180,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
private final String path;
private boolean releaseDirectory;
- private final Map<Long, IndexFingerprint> maxVersionFingerprintCache = new ConcurrentHashMap<>();
-
private final NamedList<Object> readerStats;
private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
@@ -2416,19 +2442,24 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
final SolrIndexSearcher searcher = this;
final AtomicReference<IOException> exception = new AtomicReference<>();
try {
- return maxVersionFingerprintCache.computeIfAbsent(maxVersion, key -> {
- try {
- return IndexFingerprint.getFingerprint(searcher, key);
- } catch (IOException e) {
- exception.set(e);
- return null;
- }
- });
+ return searcher.getTopReaderContext().leaves().stream()
+ .map(ctx -> {
+ try {
+ return searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion);
+ } catch (IOException e) {
+ exception.set(e);
+ return null;
+ }
+ })
+ .filter(java.util.Objects::nonNull)
+ .reduce(new IndexFingerprint(maxVersion), IndexFingerprint::reduce);
+
} finally {
if (exception.get() != null) throw exception.get();
}
}
+
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
index 877ef03..0b7e655 100644
--- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
+++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
@@ -52,6 +52,14 @@ public class IndexFingerprint implements MapSerializable {
private long numDocs;
private long maxDoc;
+ public IndexFingerprint() {
+ // default constructor
+ }
+
+ public IndexFingerprint (long maxVersionSpecified) {
+ this.maxVersionSpecified = maxVersionSpecified;
+ }
+
public long getMaxVersionSpecified() {
return maxVersionSpecified;
}
@@ -82,53 +90,62 @@ public class IndexFingerprint implements MapSerializable {
/** Opens a new realtime searcher and returns its (possibly cached) fingerprint */
public static IndexFingerprint getFingerprint(SolrCore core, long maxVersion) throws IOException {
+ RTimer timer = new RTimer();
core.getUpdateHandler().getUpdateLog().openRealtimeSearcher();
RefCounted<SolrIndexSearcher> newestSearcher = core.getUpdateHandler().getUpdateLog().uhandler.core.getRealtimeSearcher();
try {
- return newestSearcher.get().getIndexFingerprint(maxVersion);
+ IndexFingerprint f = newestSearcher.get().getIndexFingerprint(maxVersion);
+ final double duration = timer.stop();
+ log.info("IndexFingerprint millis:{} result:{}",duration, f);
+ return f;
} finally {
if (newestSearcher != null) {
newestSearcher.decref();
}
}
}
-
- /** Calculates an index fingerprint */
- public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, long maxVersion) throws IOException {
- RTimer timer = new RTimer();
-
+
+ public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion)
+ throws IOException {
SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema());
-
- IndexFingerprint f = new IndexFingerprint();
- f.maxVersionSpecified = maxVersion;
- f.maxDoc = searcher.maxDoc();
-
- // TODO: this could be parallelized, or even cached per-segment if performance becomes an issue
ValueSource vs = versionField.getType().getValueSource(versionField, null);
Map funcContext = ValueSource.newContext(searcher);
vs.createWeight(funcContext, searcher);
- for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
- int maxDoc = ctx.reader().maxDoc();
- f.numDocs += ctx.reader().numDocs();
- Bits liveDocs = ctx.reader().getLiveDocs();
- FunctionValues fv = vs.getValues(funcContext, ctx);
- for (int doc = 0; doc < maxDoc; doc++) {
- if (liveDocs != null && !liveDocs.get(doc)) continue;
- long v = fv.longVal(doc);
- f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
- if (v <= f.maxVersionSpecified) {
- f.maxInHash = Math.max(v, f.maxInHash);
- f.versionsHash += Hash.fmix64(v);
- f.numVersions++;
- }
+
+ IndexFingerprint f = new IndexFingerprint();
+ f.maxVersionSpecified = maxVersion;
+ f.maxDoc = ctx.reader().maxDoc();
+ f.numDocs = ctx.reader().numDocs();
+
+ int maxDoc = ctx.reader().maxDoc();
+ Bits liveDocs = ctx.reader().getLiveDocs();
+ FunctionValues fv = vs.getValues(funcContext, ctx);
+ for (int doc = 0; doc < maxDoc; doc++) {
+ if (liveDocs != null && !liveDocs.get(doc)) continue;
+ long v = fv.longVal(doc);
+ f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
+ if (v <= f.maxVersionSpecified) {
+ f.maxInHash = Math.max(v, f.maxInHash);
+ f.versionsHash += Hash.fmix64(v);
+ f.numVersions++;
}
}
-
- final double duration = timer.stop();
- log.info("IndexFingerprint millis:" + duration + " result:" + f);
-
+
return f;
}
+
+
+ public static IndexFingerprint reduce(IndexFingerprint acc, IndexFingerprint f2) {
+ // acc should have maxVersionSpecified already set in it using IndexFingerprint(long maxVersionSpecified) constructor
+ acc.maxDoc = Math.max(acc.maxDoc, f2.maxDoc);
+ acc.numDocs += f2.numDocs;
+ acc.maxVersionEncountered = Math.max(acc.maxVersionEncountered, f2.maxVersionEncountered);
+ acc.maxInHash = Math.max(acc.maxInHash, f2.maxInHash);
+ acc.versionsHash += f2.versionsHash;
+ acc.numVersions += f2.numVersions;
+
+ return acc;
+ }
/** returns 0 for equal, negative if f1 is less recent than f2, positive if more recent */
public static int compare(IndexFingerprint f1, IndexFingerprint f2) {
@@ -200,4 +217,5 @@ public class IndexFingerprint implements MapSerializable {
public String toString() {
return toMap(new LinkedHashMap<>()).toString();
}
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
index 348532c..ef21386 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
@@ -29,7 +29,6 @@ import java.util.stream.Collectors;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.cloud.ZkTestServer.LimitViolationAction;
import org.apache.solr.common.SolrInputDocument;
@@ -37,10 +36,8 @@ import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.Slice.State;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.core.Diagnostics;
import org.apache.solr.handler.ReplicationHandler;
import org.junit.Test;
import org.slf4j.Logger;
@@ -197,35 +194,6 @@ public class LeaderFailureAfterFreshStartTest extends AbstractFullDistribZkTestB
}
- static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
- throws Exception {
- log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
- boolean waitForLeader = true;
- int i = 0;
- ZkStateReader zkStateReader = cloudClient.getZkStateReader();
- zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
-
- while(waitForLeader) {
- ClusterState clusterState = zkStateReader.getClusterState();
- DocCollection coll = clusterState.getCollection("collection1");
- Slice slice = coll.getSlice(shardName);
- if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
- log.info("New leader got elected in {} secs", i);
- break;
- }
-
- if(i == maxWaitInSecs) {
- Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
- zkStateReader.getZkClient().printLayoutToStdOut();
- fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
- }
-
- i++;
- Thread.sleep(1000);
- }
- }
-
-
private void waitTillNodesActive() throws Exception {
for (int i = 0; i < 60; i++) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index 3ded7d2..e00ea3c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -149,7 +149,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
log.info("Now shutting down initial leader");
forceNodeFailures(singletonList(initialLeaderJetty));
log.info("Updating mappings from zk");
- LeaderFailureAfterFreshStartTest.waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
+ waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
updateMappingsFromZk(jettys, clients, true);
assertEquals("PeerSynced node did not become leader", nodePeerSynced, shardToLeaderJetty.get("shard1"));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
index 64edd21..8f3a89a 100644
--- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
@@ -122,7 +122,8 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "1000");
assertSync(client1, numVersions, true, shardsArr[0]);
- client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
+ client0.commit(); client1.commit();
+ queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
// test that delete by query is returned even if not requested, and that it doesn't delete newer stuff than it should
v=2000;
@@ -145,7 +146,6 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
assertSync(client1, numVersions, true, shardsArr[0]);
client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
-
//
// Test that handling reorders work when applying docs retrieved from peer
//
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
new file mode 100644
index 0000000..9617ff2
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.update;
+
+import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.solr.BaseDistributedSearchTestCase;
+import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
+import org.junit.Assert;
+import org.junit.Test;
+
+
+/**
+ * This test is deliberately kept in a different class as we don't want segment merging to kick in after deleting documents.
+ * This ensures that we first check the cached IndexFingerprint and
+ * recompute it only if any documents in the segment were deleted since the fingerprint was first cached.
+ *
+ *
+ */
+@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
+public class PeerSyncWithIndexFingerprintCachingTest extends BaseDistributedSearchTestCase {
+ private static int numVersions = 100; // number of versions to use when syncing
+ private final String FROM_LEADER = DistribPhase.FROMLEADER.toString();
+
+ private ModifiableSolrParams seenLeader =
+ params(DISTRIB_UPDATE_PARAM, FROM_LEADER);
+
+ public PeerSyncWithIndexFingerprintCachingTest() {
+ stress = 0;
+
+ // TODO: a better way to do this?
+ configString = "solrconfig-tlog.xml";
+ schemaString = "schema.xml";
+ }
+
+ @Test
+ @ShardsFixed(num = 3)
+ public void test() throws Exception {
+ handle.clear();
+ handle.put("timestamp", SKIPVAL);
+ handle.put("score", SKIPVAL);
+ handle.put("maxScore", SKIPVAL);
+
+ SolrClient client0 = clients.get(0);
+ SolrClient client1 = clients.get(1);
+
+ long v =1;
+ for(; v < 8; ++v) {
+ add(client0, seenLeader, sdoc("id", ""+v,"_version_",v));
+ add(client1, seenLeader, sdoc("id",""+v,"_version_",v));
+
+ }
+ client0.commit(); client1.commit();
+
+ IndexFingerprint before = getFingerprint(client0, Long.MAX_VALUE);
+
+ del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "2");
+ client0.commit();
+
+ IndexFingerprint after = getFingerprint(client0, Long.MAX_VALUE);
+
+ // make sure fingerprint before and after deleting are not the same
+ Assert.assertTrue(IndexFingerprint.compare(before, after) != 0);
+
+ // replica which missed the delete should be able to sync
+ assertSync(client1, numVersions, true, shardsArr[0]);
+ client0.commit(); client1.commit();
+
+ queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
+ }
+
+ IndexFingerprint getFingerprint(SolrClient client, long maxVersion) throws IOException, SolrServerException {
+ QueryRequest qr = new QueryRequest(params("qt","/get", "getFingerprint",Long.toString(maxVersion)));
+ NamedList rsp = client.request(qr);
+ return IndexFingerprint.fromObject(rsp.get("fingerprint"));
+ }
+
+ void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException {
+ QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "sync", StrUtils.join(Arrays.asList(syncWith), ',')));
+ NamedList rsp = client.request(qr);
+ assertEquals(expectedResult, (Boolean) rsp.get("sync"));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index 03db71c..d04d996 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -24,11 +24,14 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.FileUtils;
import org.apache.solr.BaseDistributedSearchTestCase;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.Slice.State;
import org.apache.solr.core.Diagnostics;
import org.apache.solr.core.MockDirectoryFactory;
import org.apache.zookeeper.KeeperException;
@@ -222,6 +225,34 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
log.info("Collection has disappeared - collection: " + collection);
}
+
+ static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
+ throws Exception {
+ log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
+ boolean waitForLeader = true;
+ int i = 0;
+ ZkStateReader zkStateReader = cloudClient.getZkStateReader();
+ zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
+
+ while(waitForLeader) {
+ ClusterState clusterState = zkStateReader.getClusterState();
+ DocCollection coll = clusterState.getCollection("collection1");
+ Slice slice = coll.getSlice(shardName);
+ if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
+ log.info("New leader got elected in {} secs", i);
+ break;
+ }
+
+ if(i == maxWaitInSecs) {
+ Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
+ zkStateReader.getZkClient().printLayoutToStdOut();
+ fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
+ }
+
+ i++;
+ Thread.sleep(1000);
+ }
+ }
public static void verifyReplicaStatus(ZkStateReader reader, String collection, String shard, String coreNodeName, Replica.State expectedState) throws InterruptedException {
int maxIterations = 100;
[15/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-4531: Add tests to
ensure that recovery does not fail on corrupted tlogs
Posted by kr...@apache.org.
SOLR-4531: Add tests to ensure that recovery does not fail on corrupted tlogs
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b7aa582d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b7aa582d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b7aa582d
Branch: refs/heads/jira/solr-8593
Commit: b7aa582dffd7a0bae3246e43c66a20a9c2e5341d
Parents: ce57e8a
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Tue Oct 25 12:13:08 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Tue Oct 25 12:13:08 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 5 +-
.../apache/solr/cloud/TestCloudRecovery.java | 154 +++++++++++++++++++
.../TestLeaderRecoverFromLogOnStartup.java | 77 ----------
3 files changed, 158 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7aa582d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 475ba7f..4521288 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -339,7 +339,10 @@ Other Changes
solr.log.X files are rotated, preserving solr.log from last run in solr.log.1, solr.log.1 => solr.log.2 etc
solr-*-console.log files are moved into $SOLR_LOGS_DIR/archived/ instead of being overwritten
Last JVM garbage collection log solr_gc.log is moved into $SOLR_LOGS_DIR/archived/
- (janhoy)
+ (janhoy)
+
+* SOLR-4531: Add tests to ensure that recovery does not fail on corrupted tlogs.
+ (Simon Scofield, Cao Manh Dat via shalin)
================== 6.2.1 ==================
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7aa582d/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
new file mode 100644
index 0000000..2a7413c
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.cloud.ClusterStateUtil;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.update.DirectUpdateHandler2;
+import org.apache.solr.update.UpdateLog;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestCloudRecovery extends SolrCloudTestCase {
+
+ private static final String COLLECTION = "collection1";
+
+ @BeforeClass
+ public static void setupCluster() throws Exception {
+ System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
+ System.setProperty("solr.ulog.numRecordsToKeep", "1000");
+
+ configureCluster(2)
+ .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+ .configure();
+
+ CollectionAdminRequest
+ .createCollection(COLLECTION, "config", 2, 2)
+ .setMaxShardsPerNode(2)
+ .process(cluster.getSolrClient());
+ AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
+ false, true, 30);
+ }
+
+ @Before
+ public void resetCollection() throws IOException, SolrServerException {
+ cluster.getSolrClient().deleteByQuery(COLLECTION, "*:*");
+ cluster.getSolrClient().commit(COLLECTION);
+ }
+
+ @Test
+ public void leaderRecoverFromLogOnStartupTest() throws Exception {
+ AtomicInteger countReplayLog = new AtomicInteger(0);
+ DirectUpdateHandler2.commitOnClose = false;
+ UpdateLog.testing_logReplayFinishHook = countReplayLog::incrementAndGet;
+
+ CloudSolrClient cloudClient = cluster.getSolrClient();
+ cloudClient.add(COLLECTION, sdoc("id", "1"));
+ cloudClient.add(COLLECTION, sdoc("id", "2"));
+ cloudClient.add(COLLECTION, sdoc("id", "3"));
+ cloudClient.add(COLLECTION, sdoc("id", "4"));
+
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set("q", "*:*");
+ QueryResponse resp = cloudClient.query(COLLECTION, params);
+ assertEquals(0, resp.getResults().getNumFound());
+
+ ChaosMonkey.stop(cluster.getJettySolrRunners());
+ assertTrue("Timeout waiting for all not live", ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000));
+ ChaosMonkey.start(cluster.getJettySolrRunners());
+ assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cloudClient.getZkStateReader(), COLLECTION, 120000));
+
+ resp = cloudClient.query(COLLECTION, params);
+ assertEquals(4, resp.getResults().getNumFound());
+ // Make sure all nodes is recover from tlog
+ assertEquals(4, countReplayLog.get());
+ }
+
+ @Test
+ public void corruptedLogTest() throws Exception {
+ AtomicInteger countReplayLog = new AtomicInteger(0);
+ DirectUpdateHandler2.commitOnClose = false;
+ UpdateLog.testing_logReplayFinishHook = countReplayLog::incrementAndGet;
+
+ CloudSolrClient cloudClient = cluster.getSolrClient();
+ cloudClient.add(COLLECTION, sdoc("id", "1000"));
+ cloudClient.add(COLLECTION, sdoc("id", "1001"));
+ for (int i = 0; i < 10; i++) {
+ cloudClient.add(COLLECTION, sdoc("id", String.valueOf(i)));
+ }
+
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set("q", "*:*");
+ QueryResponse resp = cloudClient.query(COLLECTION, params);
+ assertEquals(0, resp.getResults().getNumFound());
+
+ int logHeaderSize = Integer.MAX_VALUE;
+ Map<File, byte[]> contentFiles = new HashMap<>();
+ for (JettySolrRunner solrRunner : cluster.getJettySolrRunners()) {
+ for (SolrCore solrCore : solrRunner.getCoreContainer().getCores()) {
+ File tlogFolder = new File(solrCore.getUlogDir(), UpdateLog.TLOG_NAME);
+ String[] tLogFiles = tlogFolder.list();
+ Arrays.sort(tLogFiles);
+ File lastTLogFile = new File(tlogFolder.getAbsolutePath() + "/" + tLogFiles[tLogFiles.length - 1]);
+ byte[] tlogBytes = IOUtils.toByteArray(new FileInputStream(lastTLogFile));
+ contentFiles.put(lastTLogFile, tlogBytes);
+ logHeaderSize = Math.min(tlogBytes.length, logHeaderSize);
+ }
+ }
+
+ ChaosMonkey.stop(cluster.getJettySolrRunners());
+ assertTrue("Timeout waiting for all not live", ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000));
+
+ for (Map.Entry<File, byte[]> entry : contentFiles.entrySet()) {
+ byte[] tlogBytes = entry.getValue();
+
+ if (tlogBytes.length <= logHeaderSize) continue;
+ FileOutputStream stream = new FileOutputStream(entry.getKey());
+ int skipLastBytes = Math.max(random().nextInt(tlogBytes.length - logHeaderSize), 2);
+ for (int i = 0; i < entry.getValue().length - skipLastBytes; i++) {
+ stream.write(tlogBytes[i]);
+ }
+ stream.close();
+ }
+
+ ChaosMonkey.start(cluster.getJettySolrRunners());
+ assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cloudClient.getZkStateReader(), COLLECTION, 120000));
+
+ resp = cloudClient.query(COLLECTION, params);
+ // Make sure cluster still healthy
+ assertTrue(resp.getResults().getNumFound() >= 2);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7aa582d/solr/core/src/test/org/apache/solr/cloud/TestLeaderRecoverFromLogOnStartup.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderRecoverFromLogOnStartup.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderRecoverFromLogOnStartup.java
deleted file mode 100644
index 10de042..0000000
--- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderRecoverFromLogOnStartup.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.cloud;
-
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.cloud.ClusterStateUtil;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.update.DirectUpdateHandler2;
-import org.apache.solr.update.UpdateLog;
-import org.junit.Test;
-
-public class TestLeaderRecoverFromLogOnStartup extends AbstractFullDistribZkTestBase {
- @Override
- public void distribSetUp() throws Exception {
- System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
- System.setProperty("solr.ulog.numRecordsToKeep", "1000");
- super.distribSetUp();
- }
-
- @Test
- @ShardsFixed(num = 4)
- public void test() throws Exception {
- AtomicInteger countReplayLog = new AtomicInteger(0);
- DirectUpdateHandler2.commitOnClose = false;
- UpdateLog.testing_logReplayFinishHook = new Runnable() {
- @Override
- public void run() {
- countReplayLog.incrementAndGet();
- }
- };
-
- String testCollectionName = "testCollection";
- createCollection(testCollectionName, 2, 2, 1);
- waitForRecoveriesToFinish(false);
-
- cloudClient.setDefaultCollection(testCollectionName);
- cloudClient.add(sdoc("id", "1"));
- cloudClient.add(sdoc("id", "2"));
- cloudClient.add(sdoc("id", "3"));
- cloudClient.add(sdoc("id", "4"));
-
- ModifiableSolrParams params = new ModifiableSolrParams();
- params.set("q", "*:*");
- QueryResponse resp = cloudClient.query(params);
- assertEquals(0, resp.getResults().getNumFound());
-
- ChaosMonkey.stop(jettys);
- ChaosMonkey.stop(controlJetty);
- assertTrue("Timeout waiting for all not live", ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000));
- ChaosMonkey.start(jettys);
- ChaosMonkey.start(controlJetty);
- assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cloudClient.getZkStateReader(), testCollectionName, 120000));
-
- cloudClient.commit();
- resp = cloudClient.query(params);
- assertEquals(4, resp.getResults().getNumFound());
- // Make sure all nodes is recover from tlog
- assertEquals(4, countReplayLog.get());
- }
-}
[08/50] [abbrv] lucene-solr:jira/solr-8593: LUCENE-7520: WSTE
shouldn't expand MTQ if its field doesn't match filter
Posted by kr...@apache.org.
LUCENE-7520: WSTE shouldn't expand MTQ if its field doesn't match filter
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e1b06938
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e1b06938
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e1b06938
Branch: refs/heads/jira/solr-8593
Commit: e1b06938b4b0442b18878e59fde57e29ca641499
Parents: ef57374
Author: David Smiley <ds...@apache.org>
Authored: Mon Oct 24 09:31:55 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Mon Oct 24 09:31:55 2016 -0400
----------------------------------------------------------------------
lucene/CHANGES.txt | 4 ++++
.../highlight/WeightedSpanTermExtractor.java | 13 +++++--------
.../lucene/search/highlight/HighlighterTest.java | 17 +++++++++++++++++
3 files changed, 26 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c4b3521..954137f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -111,6 +111,10 @@ Improvements
* LUCENE-7496: Better toString for SweetSpotSimilarity (janhoy)
+* LUCENE-7520: Highlighter's WeightedSpanTermExtractor shouldn't attempt to expand a MultiTermQuery
+ when its field doesn't match the field the extraction is scoped to.
+ (Cao Manh Dat via David Smiley)
+
Optimizations
* LUCENE-7501: BKDReader should not store the split dimension explicitly in the
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
index 1b277f1..0e0093b 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
@@ -217,13 +217,14 @@ public class WeightedSpanTermExtractor {
} else if (isQueryUnsupported(query.getClass())) {
// nothing
} else {
+ if (query instanceof MultiTermQuery &&
+ (!expandMultiTermQuery || !fieldNameComparator(((MultiTermQuery)query).getField()))) {
+ return;
+ }
Query origQuery = query;
final IndexReader reader = getLeafContext().reader();
Query rewritten;
if (query instanceof MultiTermQuery) {
- if (!expandMultiTermQuery) {
- return;
- }
rewritten = MultiTermQuery.SCORING_BOOLEAN_REWRITE.rewrite(reader, (MultiTermQuery) query);
} else {
rewritten = origQuery.rewrite(reader);
@@ -508,11 +509,7 @@ public class WeightedSpanTermExtractor {
*/
public Map<String,WeightedSpanTerm> getWeightedSpanTerms(Query query, float boost, TokenStream tokenStream,
String fieldName) throws IOException {
- if (fieldName != null) {
- this.fieldName = fieldName;
- } else {
- this.fieldName = null;
- }
+ this.fieldName = fieldName;
Map<String,WeightedSpanTerm> terms = new PositionCheckingMap<>();
this.tokenStream = tokenStream;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index fc402ba..c37709b 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -33,6 +33,7 @@ import java.util.StringTokenizer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CachingTokenFilter;
+import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockPayloadAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
@@ -1339,6 +1340,22 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
+ public void testNotRewriteMultiTermQuery() throws IOException {
+ // field "bar": (not the field we ultimately want to extract)
+ MultiTermQuery mtq = new TermRangeQuery("bar", new BytesRef("aa"), new BytesRef("zz"), true, true) ;
+ WeightedSpanTermExtractor extractor = new WeightedSpanTermExtractor() {
+ @Override
+ protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
+ assertEquals(mtq, query);
+ super.extract(query, boost, terms);
+ }
+ };
+ extractor.setExpandMultiTermQuery(true);
+ extractor.setMaxDocCharsToAnalyze(51200);
+ extractor.getWeightedSpanTerms(
+ mtq, 3, new CannedTokenStream(new Token("aa",0,2), new Token("bb", 2,4)), "foo"); // field "foo"
+ }
+
public void testGetBestSingleFragmentWithWeights() throws Exception {
TestHighlightRunner helper = new TestHighlightRunner() {
[17/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9441: Solr
collection backup on HDFS can only be manipulated by the Solr process owner.
Posted by kr...@apache.org.
SOLR-9441: Solr collection backup on HDFS can only be manipulated by the Solr process owner.
This closes #71.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/27ba8e2e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/27ba8e2e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/27ba8e2e
Branch: refs/heads/jira/solr-8593
Commit: 27ba8e2e82df6b901bbc5adaa3490d5f002fd76f
Parents: 0782b09
Author: markrmiller <ma...@apache.org>
Authored: Tue Oct 25 10:21:00 2016 -0400
Committer: markrmiller <ma...@apache.org>
Committed: Tue Oct 25 10:21:57 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../core/backup/repository/HdfsBackupRepository.java | 9 +++++++++
.../apache/solr/handler/TestHdfsBackupRestoreCore.java | 11 +++++++++++
3 files changed, 23 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27ba8e2e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4521288..2f4827b 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -211,6 +211,9 @@ Bug Fixes
* SOLR-9687: Fixed Interval Facet count issue in cases of open/close intervals on the same values
(Andy Chillrud, Tomás Fernández Löbbe)
+* SOLR-9441: Solr collection backup on HDFS can only be manipulated by the Solr process owner.
+ (Hrishikesh Gadre via Mark Miller)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27ba8e2e/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
index f12d9fd..f465765 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@@ -41,6 +42,8 @@ import org.apache.solr.store.hdfs.HdfsDirectory.HdfsIndexInput;
import com.google.common.base.Preconditions;
public class HdfsBackupRepository implements BackupRepository {
+ private static final String HDFS_UMASK_MODE_PARAM = "solr.hdfs.permissions.umask-mode";
+
private HdfsDirectoryFactory factory;
private Configuration hdfsConfig = null;
private FileSystem fileSystem = null;
@@ -58,6 +61,12 @@ public class HdfsBackupRepository implements BackupRepository {
factory.init(args);
this.hdfsConfig = factory.getConf();
+ // Configure the umask mode if specified.
+ if (args.get(HDFS_UMASK_MODE_PARAM) != null) {
+ String umaskVal = (String)args.get(HDFS_UMASK_MODE_PARAM);
+ this.hdfsConfig.set(FsPermission.UMASK_LABEL, umaskVal);
+ }
+
String hdfsSolrHome = (String) Preconditions.checkNotNull(args.get(HdfsDirectoryFactory.HDFS_HOME),
"Please specify " + HdfsDirectoryFactory.HDFS_HOME + " property.");
Path path = new Path(hdfsSolrHome);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27ba8e2e/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
index 4e8d4cc..a07d491 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
@@ -27,7 +27,10 @@ import java.util.Map;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -88,6 +91,7 @@ public class TestHdfsBackupRestoreCore extends SolrCloudTestCase {
" <str name=\"location\">${solr.hdfs.default.backup.path}</str>\n" +
" <str name=\"solr.hdfs.home\">${solr.hdfs.home:}</str>\n" +
" <str name=\"solr.hdfs.confdir\">${solr.hdfs.confdir:}</str>\n" +
+ " <str name=\"solr.hdfs.permissions.umask-mode\">${solr.hdfs.permissions.umask-mode:000}</str>\n" +
" </repository>\n" +
" </backup>\n" +
" \n" +
@@ -233,6 +237,13 @@ public class TestHdfsBackupRestoreCore extends SolrCloudTestCase {
}
//See if restore was successful by checking if all the docs are present again
BackupRestoreUtils.verifyDocs(nDocs, masterClient, coreName);
+
+ // Verify the permissions for the backup folder.
+ FileStatus status = fs.getFileStatus(new org.apache.hadoop.fs.Path("/backup/snapshot."+backupName));
+ FsPermission perm = status.getPermission();
+ assertEquals(FsAction.ALL, perm.getUserAction());
+ assertEquals(FsAction.ALL, perm.getGroupAction());
+ assertEquals(FsAction.ALL, perm.getOtherAction());
}
}
}
[43/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9533: Fix precommit
Posted by kr...@apache.org.
SOLR-9533: Fix precommit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d215f9e5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d215f9e5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d215f9e5
Branch: refs/heads/jira/solr-8593
Commit: d215f9e5681d7bd570eb3c965402a1010f023942
Parents: 727bf55
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 26 20:08:28 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 26 20:19:40 2016 -0400
----------------------------------------------------------------------
.../org/apache/solr/core/TestCorePropertiesReload.java | 11 +++++++----
1 file changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d215f9e5/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
index bb7aaa0..6e1f768 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
@@ -17,12 +17,14 @@
package org.apache.solr.core;
import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.io.BufferedWriter;
+import java.io.Writer;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.apache.solr.SolrTestCaseJ4;
-import org.junit.BeforeClass;
import org.junit.Test;
public class TestCorePropertiesReload extends SolrTestCaseJ4 {
@@ -59,11 +61,12 @@ public class TestCorePropertiesReload extends SolrTestCaseJ4 {
}
private void writeProperties(Properties props) throws Exception {
- FileWriter out = null;
+ Writer out = null;
try {
File confDir = new File(new File(solrHomeDirectory, "collection1"), "conf");
- out = new FileWriter(new File(confDir, "solrcore.properties"));
+ out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(new File(confDir, "solrcore.properties")), "UTF8"));
props.store(out, "Reload Test");
+
} finally {
out.close();
}
[29/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9692: blockUnknown
property still breaks the internode communication
Posted by kr...@apache.org.
SOLR-9692: blockUnknown property still breaks the internode communication
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6d9a99f9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6d9a99f9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6d9a99f9
Branch: refs/heads/jira/solr-8593
Commit: 6d9a99f99264b32cd5547dfe6f5db9862fd84bda
Parents: 9303112
Author: Noble Paul <no...@apache.org>
Authored: Wed Oct 26 13:36:37 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Oct 26 13:36:37 2016 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +-
.../apache/solr/servlet/SolrDispatchFilter.java | 22 ++++++++------------
.../solr/security/BasicAuthIntegrationTest.java | 2 +-
3 files changed, 11 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d9a99f9/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 8e6ee7e..7dfbf39 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -372,7 +372,7 @@ Bug Fixes
* SOLR-9389: HDFS Transaction logs stay open for writes which leaks Xceivers. (Tim Owen via Mark Miller)
-* SOLR-9188: blockUnknown property makes inter-node communication impossible (noble)
+* SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
* SOLR-9455: Deleting a sub-shard in recovery state can mark parent shard as inactive. (shalin)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d9a99f9/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index b9d3729..5a4cfb6 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -348,27 +348,23 @@ public class SolrDispatchFilter extends BaseSolrFilter {
if (authenticationPlugin == null) {
return true;
} else {
- try {
- if (PKIAuthenticationPlugin.PATH.equals(((HttpServletRequest) request).getPathInfo())) return true;
- } catch (Exception e) {
- log.error("Unexpected error ", e);
- }
-
- //special case when solr is securing inter-node requests
+ // /admin/info/key must be always open. see SOLR-9188
+ // tests work only w/ getPathInfo
+ //otherwise it's just enough to have getServletPath()
+ if (PKIAuthenticationPlugin.PATH.equals(((HttpServletRequest) request).getServletPath()) ||
+ PKIAuthenticationPlugin.PATH.equals(((HttpServletRequest) request).getPathInfo())) return true;
String header = ((HttpServletRequest) request).getHeader(PKIAuthenticationPlugin.HEADER);
if (header != null && cores.getPkiAuthenticationPlugin() != null)
authenticationPlugin = cores.getPkiAuthenticationPlugin();
try {
log.debug("Request to authenticate: {}, domain: {}, port: {}", request, request.getLocalName(), request.getLocalPort());
// upon successful authentication, this should call the chain's next filter.
- requestContinues = authenticationPlugin.doAuthenticate(request, response, new FilterChain() {
- public void doFilter(ServletRequest req, ServletResponse rsp) throws IOException, ServletException {
- isAuthenticated.set(true);
- wrappedRequest.set(req);
- }
+ requestContinues = authenticationPlugin.doAuthenticate(request, response, (req, rsp) -> {
+ isAuthenticated.set(true);
+ wrappedRequest.set(req);
});
} catch (Exception e) {
- e.printStackTrace();
+ log.info("Error authenticating", e);
throw new SolrException(ErrorCode.SERVER_ERROR, "Error during request authentication, ", e);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d9a99f9/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 6967b27..46646c7 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -192,7 +192,7 @@ public class BasicAuthIntegrationTest extends SolrCloudTestCase {
executeCommand(baseUrl + authcPrefix, cl, "{set-property : { blockUnknown: true}}", "harry", "HarryIsUberCool");
verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/blockUnknown", "true", 20, "harry", "HarryIsUberCool");
- verifySecurityStatus(cl, baseUrl + PKIAuthenticationPlugin.PATH + "?wt=json", "key", NOT_NULL_PREDICATE, 20);
+ verifySecurityStatus(cl, baseUrl + "/admin/info/key?wt=json", "key", NOT_NULL_PREDICATE, 20);
String[] toolArgs = new String[]{
"status", "-solr", baseUrl};
[30/50] [abbrv] lucene-solr:jira/solr-8593: Merge remote-tracking
branch 'origin/master'
Posted by kr...@apache.org.
Merge remote-tracking branch 'origin/master'
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ecfbe51c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ecfbe51c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ecfbe51c
Branch: refs/heads/jira/solr-8593
Commit: ecfbe51c941fbebd0007cf167b050daf821011d8
Parents: 6d9a99f b69c5d9
Author: Noble Paul <no...@apache.org>
Authored: Wed Oct 26 13:36:51 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Oct 26 13:36:51 2016 +0530
----------------------------------------------------------------------
dev-tools/idea/solr/contrib/langid/langid.iml | 1 +
solr/CHANGES.txt | 13 ++
.../solr/handler/dataimport/DocBuilder.java | 2 +
.../handler/dataimport/TestDocBuilder2.java | 23 ++
.../org/apache/solr/core/CoreContainer.java | 32 +--
.../solr/handler/admin/SecurityConfHandler.java | 142 ++++++++----
.../handler/admin/SecurityConfHandlerLocal.java | 102 +++++++++
.../handler/admin/SecurityConfHandlerZk.java | 92 ++++++++
.../apache/solr/index/NoMergePolicyFactory.java | 34 +++
.../apache/solr/security/BasicAuthPlugin.java | 14 +-
.../security/Sha256AuthenticationProvider.java | 2 +-
.../conf/solrconfig-nomergepolicyfactory.xml | 32 +++
.../cloud/LeaderElectionContextKeyTest.java | 114 ++++++++++
.../apache/solr/core/TestMergePolicyConfig.java | 20 ++
.../SecurityConfHandlerLocalForTesting.java | 43 ++++
.../handler/admin/SecurityConfHandlerTest.java | 66 +++---
.../solr/security/BasicAuthIntegrationTest.java | 4 +-
.../solr/security/BasicAuthStandaloneTest.java | 220 +++++++++++++++++++
18 files changed, 861 insertions(+), 95 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecfbe51c/solr/CHANGES.txt
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecfbe51c/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
[19/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9536: Add hossman to
CHANGES.
Posted by kr...@apache.org.
SOLR-9536: Add hossman to CHANGES.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c15c8af6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c15c8af6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c15c8af6
Branch: refs/heads/jira/solr-8593
Commit: c15c8af66db5c2c84cdf95520a61f78d512c5911
Parents: e152575
Author: markrmiller <ma...@apache.org>
Authored: Tue Oct 25 12:42:02 2016 -0400
Committer: markrmiller <ma...@apache.org>
Committed: Tue Oct 25 12:42:14 2016 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c15c8af6/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b693543..8e6ee7e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -215,7 +215,7 @@ Bug Fixes
(Hrishikesh Gadre via Mark Miller)
* SOLR-9536: OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
- (Hrishikesh Gadre via Mark Miller)
+ (Hrishikesh Gadre, hossman via Mark Miller)
Optimizations
----------------------
[41/50] [abbrv] lucene-solr:jira/solr-8593: SOLR-9533: Reload core
config when a core is reloaded
Posted by kr...@apache.org.
SOLR-9533: Reload core config when a core is reloaded
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2ee72314
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2ee72314
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2ee72314
Branch: refs/heads/jira/solr-8593
Commit: 2ee723140c5377a4507cdaf5c877d9f0d47d82fc
Parents: 53507b4
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 26 17:38:13 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 26 20:19:40 2016 -0400
----------------------------------------------------------------------
.../src/java/org/apache/solr/core/SolrCore.java | 4 +-
.../solr/core/TestCorePropertiesReload.java | 71 ++++++++++++++++++++
2 files changed, 74 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2ee72314/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index a2dc1c4..96f8738 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -585,9 +585,11 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
boolean success = false;
SolrCore core = null;
try {
+ CoreDescriptor cd = new CoreDescriptor(coreDescriptor.getName(), coreDescriptor);
+ cd.loadExtraProperties(); //Reload the extra properties
core = new SolrCore(getName(), getDataDir(), coreConfig.getSolrConfig(),
coreConfig.getIndexSchema(), coreConfig.getProperties(),
- coreDescriptor, updateHandler, solrDelPolicy, currentCore);
+ cd, updateHandler, solrDelPolicy, currentCore);
// we open a new IndexWriter to pick up the latest config
core.getUpdateHandler().getSolrCoreState().newIndexWriter(core, false);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2ee72314/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
new file mode 100644
index 0000000..bb7aaa0
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.SolrTestCaseJ4;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestCorePropertiesReload extends SolrTestCaseJ4 {
+
+ private final File solrHomeDirectory = createTempDir().toFile();
+
+ public void setMeUp() throws Exception {
+ FileUtils.copyDirectory(new File(TEST_HOME()), solrHomeDirectory);
+ Properties props = new Properties();
+ props.setProperty("test", "Before reload");
+ writeProperties(props);
+ initCore("solrconfig.xml", "schema.xml", solrHomeDirectory.getAbsolutePath());
+ }
+
+ @Test
+ public void testPropertiesReload() throws Exception {
+ setMeUp();
+ SolrCore core = h.getCore();
+ CoreDescriptor coreDescriptor = core.getCoreDescriptor();
+ String testProp = coreDescriptor.getCoreProperty("test", null);
+ assertTrue(testProp.equals("Before reload"));
+
+ //Re-write the properties file
+ Properties props = new Properties();
+ props.setProperty("test", "After reload");
+ writeProperties(props);
+
+ h.reload();
+ core = h.getCore();
+ coreDescriptor = core.getCoreDescriptor();
+
+ testProp = coreDescriptor.getCoreProperty("test", null);
+ assertTrue(testProp.equals("After reload"));
+ }
+
+ private void writeProperties(Properties props) throws Exception {
+ FileWriter out = null;
+ try {
+ File confDir = new File(new File(solrHomeDirectory, "collection1"), "conf");
+ out = new FileWriter(new File(confDir, "solrcore.properties"));
+ props.store(out, "Reload Test");
+ } finally {
+ out.close();
+ }
+ }
+}