You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by is...@apache.org on 2017/03/12 00:18:10 UTC
[01/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7727: Replace
end-of-life Markdown parser "Pegdown" by "Flexmark" for compatibility with
Java 9
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-6736 ed99a219c -> f664f1f39
LUCENE-7727: Replace end-of-life Markdown parser "Pegdown" by "Flexmark" for compatibility with Java 9
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/707d7b91
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/707d7b91
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/707d7b91
Branch: refs/heads/jira/solr-6736
Commit: 707d7b91e8793b4bb017e132c8a206acf85885ab
Parents: 3087eb5
Author: Uwe Schindler <us...@apache.org>
Authored: Thu Mar 2 17:43:44 2017 +0100
Committer: Uwe Schindler <us...@apache.org>
Committed: Thu Mar 2 17:43:44 2017 +0100
----------------------------------------------------------------------
build.xml | 10 +++----
lucene/CHANGES.txt | 3 ++
lucene/build.xml | 6 ++--
lucene/common-build.xml | 65 ++++++++++++++++++++++++----------------
solr/build.xml | 9 ++----
solr/site/quickstart.mdtext | 2 --
6 files changed, 53 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 07c01c2..96c4526 100644
--- a/build.xml
+++ b/build.xml
@@ -89,7 +89,7 @@
<fail message="The Beast only works inside of individual modules"/>
</target>
- <target name="documentation" depends="resolve-pegdown" description="Generate Lucene and Solr Documentation">
+ <target name="documentation" depends="resolve-markdown" description="Generate Lucene and Solr Documentation">
<subant target="documentation" inheritall="false" failonerror="true">
<fileset dir="lucene" includes="build.xml" />
<fileset dir="solr" includes="build.xml" />
@@ -97,7 +97,7 @@
</subant>
</target>
- <target name="documentation-lint" depends="resolve-pegdown,-ecj-javadoc-lint-unsupported,-ecj-resolve" description="Validates the generated documentation (HTML errors, broken links,...)">
+ <target name="documentation-lint" depends="resolve-markdown,-ecj-javadoc-lint-unsupported,-ecj-resolve" description="Validates the generated documentation (HTML errors, broken links,...)">
<subant target="documentation-lint" inheritall="false" failonerror="true">
<fileset dir="lucene" includes="build.xml" />
<fileset dir="solr" includes="build.xml" />
@@ -319,7 +319,7 @@
<delete failonerror="true" dir="${maven-build-dir}/"/>
</target>
- <target name="generate-maven-artifacts" depends="resolve,resolve-groovy,resolve-pegdown,install-maven-tasks"
+ <target name="generate-maven-artifacts" depends="resolve,resolve-groovy,resolve-markdown,install-maven-tasks"
description="Generate Maven Artifacts for Lucene and Solr">
<property name="maven.dist.dir" location="dist/maven" />
<mkdir dir="${maven.dist.dir}" />
@@ -543,7 +543,7 @@ File | Project Structure | Platform Settings | SDKs):
</target>
<target name="nightly-smoke" description="Builds an unsigned release and smoke tests it"
- depends="clean,resolve-groovy,resolve-pegdown,install-maven-tasks">
+ depends="clean,resolve-groovy,resolve-markdown,install-maven-tasks">
<fail message="To run nightly smoke, the JDK must be exactly Java 1.8, was: ${java.specification.version}">
<condition>
<not><equals arg1="${java.specification.version}" arg2="1.8"/></not>
@@ -788,7 +788,7 @@ Test args: [${args}]</echo>
</antcall>
</target>
- <target name="jenkins-maven-nightly" depends="-print-java-info,clean,clean-maven-build,resolve-groovy,resolve-pegdown,install-maven-tasks">
+ <target name="jenkins-maven-nightly" depends="-print-java-info,clean,clean-maven-build,resolve-groovy,resolve-markdown,install-maven-tasks">
<!-- step 1: build, install, deploy, and validate ANT-generated maven artifacts: -->
<antcall>
<param name="is.jenkins.build" value="true"/>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index f393dab..5e468bf 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -245,6 +245,9 @@ Build
* LUCENE-7726: Fix HTML entity bugs in Javadocs to be able to build with
Java 9. (Uwe Schindler, Hossman)
+* LUCENE-7727: Replace end-of-life Markdown parser "Pegdown" by "Flexmark"
+ for compatibility with Java 9. (Uwe Schindler)
+
Other
* LUCENE-7666: Fix typos in lucene-join package info javadoc.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/lucene/build.xml
----------------------------------------------------------------------
diff --git a/lucene/build.xml b/lucene/build.xml
index f004a19..9397000 100644
--- a/lucene/build.xml
+++ b/lucene/build.xml
@@ -208,7 +208,7 @@
<modules-crawl target="-ecj-javadoc-lint"/>
</target>
- <target name="process-webpages" depends="resolve-pegdown">
+ <target name="process-webpages" depends="resolve-markdown">
<makeurl property="process-webpages.buildfiles" separator="|">
<fileset dir="." includes="**/build.xml" excludes="build.xml,analysis/*,build/**,tools/**,site/**"/>
</makeurl>
@@ -237,10 +237,10 @@
<param name="defaultCodec" expression="${defaultCodec}"/>
</xslt>
- <pegdown todir="${javadoc.dir}">
+ <markdown todir="${javadoc.dir}">
<fileset dir="." includes="MIGRATE.txt,JRE_VERSION_MIGRATION.txt,SYSTEM_REQUIREMENTS.txt"/>
<globmapper from="*.txt" to="*.html"/>
- </pegdown>
+ </markdown>
<copy todir="${javadoc.dir}">
<fileset dir="site/html"/>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/lucene/common-build.xml
----------------------------------------------------------------------
diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index 7d64bc2..327a01d 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -2410,61 +2410,74 @@ ${ant.project.name}.test.dependencies=${test.classpath.list}
</forbidden-apis>
</target>
- <target name="resolve-pegdown" unless="pegdown.loaded" depends="resolve-groovy">
- <ivy:cachepath organisation="org.pegdown" module="pegdown" revision="1.6.0"
- inline="true" conf="default" transitive="true" pathid="pegdown.classpath"/>
- <groovy classpathref="pegdown.classpath"><![CDATA[
+ <target name="resolve-markdown" unless="markdown.loaded" depends="resolve-groovy">
+ <property name="flexmark.version" value="0.16.1"/>
+ <ivy:cachepath transitive="true" pathid="markdown.classpath">
+ <ivy:dependency org="com.vladsch.flexmark" name="flexmark" rev="${flexmark.version}" conf="default" />
+ <ivy:dependency org="com.vladsch.flexmark" name="flexmark-ext-autolink" rev="${flexmark.version}" conf="default" />
+ <ivy:dependency org="com.vladsch.flexmark" name="flexmark-ext-abbreviation" rev="${flexmark.version}" conf="default" />
+ </ivy:cachepath>
+ <groovy classpathref="markdown.classpath"><![CDATA[
import org.apache.tools.ant.AntTypeDefinition;
import org.apache.tools.ant.ComponentHelper;
import org.apache.tools.ant.filters.TokenFilter.ChainableReaderFilter;
- import org.pegdown.PegDownProcessor;
- import org.pegdown.Extensions;
- import org.pegdown.FastEncoder;
-
- public final class PegDownFilter extends ChainableReaderFilter {
+ import com.vladsch.flexmark.ast.Node;
+ import com.vladsch.flexmark.ast.Heading;
+ import com.vladsch.flexmark.html.HtmlRenderer;
+ import com.vladsch.flexmark.parser.Parser;
+ import com.vladsch.flexmark.parser.ParserEmulationProfile;
+ import com.vladsch.flexmark.util.html.Escaping;
+ import com.vladsch.flexmark.util.options.MutableDataSet;
+ import com.vladsch.flexmark.ext.abbreviation.AbbreviationExtension;
+ import com.vladsch.flexmark.ext.autolink.AutolinkExtension;
+
+ public final class MarkdownFilter extends ChainableReaderFilter {
@Override
public String filter(String markdownSource) {
- PegDownProcessor processor = new PegDownProcessor(
- Extensions.ABBREVIATIONS | Extensions.AUTOLINKS |
- Extensions.FENCED_CODE_BLOCKS | Extensions.SMARTS
- );
+ MutableDataSet options = new MutableDataSet();
+ options.setFrom(ParserEmulationProfile.MARKDOWN);
+ options.set(Parser.EXTENSIONS, [ AbbreviationExtension.create(), AutolinkExtension.create() ]);
+ options.set(HtmlRenderer.RENDER_HEADER_ID, true);
+ options.set(HtmlRenderer.MAX_TRAILING_BLANK_LINES, 0);
+ Node parsed = Parser.builder(options).build().parse(markdownSource);
+
StringBuilder html = new StringBuilder('<html>\n<head>\n');
- // match the first heading in markdown and use as title:
- markdownSource.find(~/(?m)^#+\s*(.+)$/) {
- match, title -> html.append('<title>').append(FastEncoder.encode(title)).append('</title>\n');
+ CharSequence title = parsed.getFirstChildAny(Heading.class)?.getText();
+ if (title != null) {
+ html.append('<title>').append(Escaping.escapeHtml(title, false)).append('</title>\n');
}
html.append('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">\n')
- .append('</head>\n<body>\n')
- .append(processor.markdownToHtml(markdownSource))
- .append('\n</body>\n</html>\n');
+ .append('</head>\n<body>\n');
+ HtmlRenderer.builder(options).build().render(parsed, html);
+ html.append('</body>\n</html>\n');
return html;
}
}
AntTypeDefinition t = new AntTypeDefinition();
- t.setName('pegdownfilter');
- t.setClass(PegDownFilter.class);
+ t.setName('markdownfilter');
+ t.setClass(MarkdownFilter.class);
ComponentHelper.getComponentHelper(project).addDataTypeDefinition(t);
]]></groovy>
- <property name="pegdown.loaded" value="true"/>
+ <property name="markdown.loaded" value="true"/>
</target>
- <!-- PEGDOWN macro: Before using depend on the target "resolve-pegdown" -->
+ <!-- markdown macro: Before using depend on the target "resolve-markdown" -->
- <macrodef name="pegdown">
+ <macrodef name="markdown">
<attribute name="todir"/>
<attribute name="flatten" default="false"/>
<attribute name="overwrite" default="false"/>
<element name="nested" optional="false" implicit="true"/>
<sequential>
<copy todir="@{todir}" flatten="@{flatten}" overwrite="@{overwrite}" verbose="true"
- preservelastmodified="false" encoding="UTF-8" taskname="pegdown"
+ preservelastmodified="false" encoding="UTF-8" taskname="markdown"
>
<filterchain>
<tokenfilter>
<filetokenizer/>
<replaceregex pattern="\b(LUCENE|SOLR)\-\d+\b" replace="[\0](https://issues.apache.org/jira/browse/\0)" flags="gs"/>
- <pegdownfilter/>
+ <markdownfilter/>
</tokenfilter>
</filterchain>
<nested/>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/solr/build.xml
----------------------------------------------------------------------
diff --git a/solr/build.xml b/solr/build.xml
index b176585..091e247 100644
--- a/solr/build.xml
+++ b/solr/build.xml
@@ -202,9 +202,6 @@
<replaceregex pattern="src\s*=\s*"images/" replace="src="/solr/assets/images/" flags="gs"/>
<!-- Redirect to the website's version-specific system requirements page -->
<replaceregex pattern="\(SYSTEM_REQUIREMENTS.html\)" replace="(/solr/api/SYSTEM_REQUIREMENTS.html)" flags="gs"/>
- <!-- Remove name anchors. Unlike pegdown, the website markdown processor automatically attaches id-s to headers.
- Exception: don't remove the "techproducts" anchor, because it has no following header. -->
- <replaceregex pattern="<a\s+name\s*=\s*"(?!techproducts)[^"]+"\s*>\s*</a>\s*" replace="" flags="gs"/>
</tokenfilter>
</filterchain>
</copy>
@@ -225,7 +222,7 @@
</copy>
</target>
- <target name="process-webpages" depends="define-lucene-javadoc-url,resolve-pegdown">
+ <target name="process-webpages" depends="define-lucene-javadoc-url,resolve-markdown">
<makeurl property="process-webpages.buildfiles" separator="|">
<fileset dir="." includes="core/build.xml,test-framework/build.xml,solrj/build.xml,contrib/**/build.xml"/>
</makeurl>
@@ -244,10 +241,10 @@
<param name="luceneJavadocUrl" expression="${lucene.javadoc.url}"/>
</xslt>
- <pegdown todir="${javadoc.dir}">
+ <markdown todir="${javadoc.dir}">
<fileset dir="site" includes="**/*.mdtext"/>
<globmapper from="*.mdtext" to="*.html"/>
- </pegdown>
+ </markdown>
<copy todir="${javadoc.dir}">
<fileset dir="site/assets" />
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/707d7b91/solr/site/quickstart.mdtext
----------------------------------------------------------------------
diff --git a/solr/site/quickstart.mdtext b/solr/site/quickstart.mdtext
index 8d2002b..3c82adb 100644
--- a/solr/site/quickstart.mdtext
+++ b/solr/site/quickstart.mdtext
@@ -263,7 +263,6 @@ Execute the following command to delete a specific document:
bin/post -c gettingstarted -d "<delete><id>SP2514N</id></delete>"
-<a name="searching"></a>
## Searching
Solr can be queried via REST clients, cURL, wget, Chrome POSTMAN, etc., as well as via the native clients available for
@@ -594,7 +593,6 @@ Here's a Unix script for convenient copying and pasting in order to run the key
bin/solr healthcheck -c gettingstarted
date
-<a name="cleanup"></a>
## Cleanup
As you work through this guide, you may want to stop Solr and reset the environment back to the starting point.
[09/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10225: fix
BlockCache evictions metric to not count explicit removal
Posted by is...@apache.org.
SOLR-10225: fix BlockCache evictions metric to not count explicit removal
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4990eed1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4990eed1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4990eed1
Branch: refs/heads/jira/solr-6736
Commit: 4990eed1b3aef3fb8bf98fa5427f12a96c029d03
Parents: fbc844d
Author: yonik <yo...@apache.org>
Authored: Fri Mar 3 12:39:33 2017 -0500
Committer: yonik <yo...@apache.org>
Committed: Fri Mar 3 12:39:33 2017 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../java/org/apache/solr/store/blockcache/BlockCache.java | 9 ++++++---
2 files changed, 9 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4990eed1/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index fa4d33b..2659155 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -196,6 +196,9 @@ Bug Fixes
* SOLR-10196: ElectionContext#runLeaderProcess can hit NPE on core close. (Mark Miller)
+* SOLR-10225: Fix HDFS BlockCache evictions metric to not count explicit removal
+ due to a directory close. (yonik)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4990eed1/solr/core/src/java/org/apache/solr/store/blockcache/BlockCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/BlockCache.java b/solr/core/src/java/org/apache/solr/store/blockcache/BlockCache.java
index ad5b2f4..9deef6c 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/BlockCache.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/BlockCache.java
@@ -23,6 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.RemovalCause;
import com.github.benmanes.caffeine.cache.RemovalListener;
/**
@@ -75,7 +76,7 @@ public class BlockCache {
lockCounters[i] = new AtomicInteger();
}
- RemovalListener<BlockCacheKey,BlockCacheLocation> listener = (blockCacheKey, blockCacheLocation, removalCause) -> releaseLocation(blockCacheKey, blockCacheLocation);
+ RemovalListener<BlockCacheKey,BlockCacheLocation> listener = (blockCacheKey, blockCacheLocation, removalCause) -> releaseLocation(blockCacheKey, blockCacheLocation, removalCause);
cache = Caffeine.newBuilder()
.removalListener(listener)
@@ -88,7 +89,7 @@ public class BlockCache {
cache.invalidate(key);
}
- private void releaseLocation(BlockCacheKey blockCacheKey, BlockCacheLocation location) {
+ private void releaseLocation(BlockCacheKey blockCacheKey, BlockCacheLocation location, RemovalCause removalCause) {
if (location == null) {
return;
}
@@ -103,7 +104,9 @@ public class BlockCache {
for (OnRelease onRelease : onReleases) {
onRelease.release(blockCacheKey);
}
- metrics.blockCacheEviction.incrementAndGet();
+ if (removalCause.wasEvicted()) {
+ metrics.blockCacheEviction.incrementAndGet();
+ }
metrics.blockCacheSize.decrementAndGet();
}
[14/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10227 Fix a test
failure.
Posted by is...@apache.org.
SOLR-10227 Fix a test failure.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5ccc8e7a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5ccc8e7a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5ccc8e7a
Branch: refs/heads/jira/solr-6736
Commit: 5ccc8e7ae9419ec8077f6fa6c3a2972edb9e393d
Parents: b66d133
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Sat Mar 4 09:31:26 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Sat Mar 4 09:31:26 2017 +0100
----------------------------------------------------------------------
.../src/test/org/apache/solr/metrics/SolrMetricManagerTest.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5ccc8e7a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
index 3813488..ee2acd3 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
@@ -58,8 +58,8 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
Map<String, Counter> metrics1 = SolrMetricTestUtils.getRandomMetrics(r, true);
Map<String, Counter> metrics2 = SolrMetricTestUtils.getRandomMetrics(r, true);
- String fromName = TestUtil.randomSimpleString(r, 1, 10);
- String toName = TestUtil.randomSimpleString(r, 1, 10);
+ String fromName = "from-" + TestUtil.randomSimpleString(r, 1, 10);
+ String toName = "to-" + TestUtil.randomSimpleString(r, 1, 10);
// register test metrics
for (Map.Entry<String, Counter> entry : metrics1.entrySet()) {
metricManager.register(fromName, entry.getValue(), false, entry.getKey(), "metrics1");
[44/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7700: Move
throughput control and merge aborting out of IndexWriter's core.
Posted by is...@apache.org.
LUCENE-7700: Move throughput control and merge aborting out of IndexWriter's core.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9540bc37
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9540bc37
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9540bc37
Branch: refs/heads/jira/solr-6736
Commit: 9540bc37583dfd4e995b893154039fcf031dc3c3
Parents: d2bf30d
Author: Dawid Weiss <dw...@apache.org>
Authored: Fri Mar 10 10:23:29 2017 +0100
Committer: Dawid Weiss <dw...@apache.org>
Committed: Fri Mar 10 10:23:29 2017 +0100
----------------------------------------------------------------------
lucene/CHANGES.txt | 6 +
.../lucene/index/ConcurrentMergeScheduler.java | 75 ++++++--
.../org/apache/lucene/index/IndexWriter.java | 94 ++++------
.../org/apache/lucene/index/MergePolicy.java | 184 +++++++++++++++++--
.../apache/lucene/index/MergeRateLimiter.java | 177 +++++++-----------
.../org/apache/lucene/index/MergeScheduler.java | 12 ++
.../apache/lucene/index/NoMergeScheduler.java | 7 +
.../lucene/index/TestMergeRateLimiter.java | 4 +-
8 files changed, 358 insertions(+), 201 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 4040945..b6ee4b8 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -117,6 +117,12 @@ API Changes
instead of once all shard responses are present. (Simon Willnauer,
Mike McCandless)
+* LUCENE-7700: A cleanup of merge throughput control logic. Refactored all the
+ code previously scattered throughout the IndexWriter and
+ ConcurrentMergeScheduler into a more accessible set of public methods (see
+ MergePolicy.OneMergeProgress, MergeScheduler.wrapForMerge and
+ OneMerge.mergeInit). (Dawid Weiss, Mike McCandless).
+
* LUCENE-7734: FieldType's copy constructor was widened to accept any IndexableFieldType.
(David Smiley)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
index 0dd0a4d..6e930c4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
@@ -25,6 +25,11 @@ import java.util.Locale;
import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FilterDirectory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.RateLimitedIndexOutput;
+import org.apache.lucene.store.RateLimiter;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.ThreadInterruptedException;
@@ -255,6 +260,36 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
assert false: "merge thread " + currentThread + " was not found";
}
+ @Override
+ public Directory wrapForMerge(OneMerge merge, Directory in) {
+ Thread mergeThread = Thread.currentThread();
+ if (!MergeThread.class.isInstance(mergeThread)) {
+ throw new AssertionError("wrapForMerge should be called from MergeThread. Current thread: "
+ + mergeThread);
+ }
+
+ // Return a wrapped Directory which has rate-limited output.
+ RateLimiter rateLimiter = ((MergeThread) mergeThread).rateLimiter;
+ return new FilterDirectory(in) {
+ @Override
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ ensureOpen();
+
+ // This Directory is only supposed to be used during merging,
+ // so all writes should have MERGE context, else there is a bug
+ // somewhere that is failing to pass down the right IOContext:
+ assert context.context == IOContext.Context.MERGE: "got context=" + context.context;
+
+ // Because rateLimiter is bound to a particular merge thread, this method should
+ // always be called from that context. Verify this.
+ assert mergeThread == Thread.currentThread() : "Not the same merge thread, current="
+ + Thread.currentThread() + ", expected=" + mergeThread;
+
+ return new RateLimitedIndexOutput(rateLimiter, in.createOutput(name, context));
+ }
+ };
+ }
+
/**
* Called whenever the running merges have changed, to set merge IO limits.
* This method sorts the merge threads by their merge size in
@@ -327,8 +362,9 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
newMBPerSec = targetMBPerSec;
}
- double curMBPerSec = merge.rateLimiter.getMBPerSec();
-
+ MergeRateLimiter rateLimiter = mergeThread.rateLimiter;
+ double curMBPerSec = rateLimiter.getMBPerSec();
+
if (verbose()) {
long mergeStartNS = merge.mergeStartNS;
if (mergeStartNS == -1) {
@@ -339,11 +375,11 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
message.append(String.format(Locale.ROOT, "merge thread %s estSize=%.1f MB (written=%.1f MB) runTime=%.1fs (stopped=%.1fs, paused=%.1fs) rate=%s\n",
mergeThread.getName(),
bytesToMB(merge.estimatedMergeBytes),
- bytesToMB(merge.rateLimiter.totalBytesWritten),
+ bytesToMB(rateLimiter.getTotalBytesWritten()),
nsToSec(now - mergeStartNS),
- nsToSec(merge.rateLimiter.getTotalStoppedNS()),
- nsToSec(merge.rateLimiter.getTotalPausedNS()),
- rateToString(merge.rateLimiter.getMBPerSec())));
+ nsToSec(rateLimiter.getTotalStoppedNS()),
+ nsToSec(rateLimiter.getTotalPausedNS()),
+ rateToString(rateLimiter.getMBPerSec())));
if (newMBPerSec != curMBPerSec) {
if (newMBPerSec == 0.0) {
@@ -364,7 +400,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
}
}
- merge.rateLimiter.setMBPerSec(newMBPerSec);
+ rateLimiter.setMBPerSec(newMBPerSec);
}
if (verbose()) {
message(message.toString());
@@ -449,7 +485,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
Thread currentThread = Thread.currentThread();
int count = 0;
for (MergeThread mergeThread : mergeThreads) {
- if (currentThread != mergeThread && mergeThread.isAlive() && mergeThread.merge.rateLimiter.getAbort() == false) {
+ if (currentThread != mergeThread && mergeThread.isAlive() && mergeThread.merge.isAborted() == false) {
count++;
}
}
@@ -497,8 +533,6 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
return;
}
- updateIOThrottle(merge);
-
boolean success = false;
try {
if (verbose()) {
@@ -507,14 +541,16 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
// OK to spawn a new merge thread to handle this
// merge:
- final MergeThread merger = getMergeThread(writer, merge);
- mergeThreads.add(merger);
+ final MergeThread newMergeThread = getMergeThread(writer, merge);
+ mergeThreads.add(newMergeThread);
+
+ updateIOThrottle(newMergeThread.merge, newMergeThread.rateLimiter);
if (verbose()) {
- message(" launch new thread [" + merger.getName() + "]");
+ message(" launch new thread [" + newMergeThread.getName() + "]");
}
- merger.start();
+ newMergeThread.start();
updateMergeThreads();
success = true;
@@ -598,16 +634,17 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
/** Runs a merge thread to execute a single merge, then exits. */
protected class MergeThread extends Thread implements Comparable<MergeThread> {
-
final IndexWriter writer;
final OneMerge merge;
+ final MergeRateLimiter rateLimiter;
/** Sole constructor. */
public MergeThread(IndexWriter writer, OneMerge merge) {
this.writer = writer;
this.merge = merge;
+ this.rateLimiter = new MergeRateLimiter(merge.getMergeProgress());
}
-
+
@Override
public int compareTo(MergeThread other) {
// Larger merges sort first:
@@ -616,9 +653,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
@Override
public void run() {
-
try {
-
if (verbose()) {
message(" merge thread: start");
}
@@ -715,7 +750,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
}
/** Tunes IO throttle when a new merge starts. */
- private synchronized void updateIOThrottle(OneMerge newMerge) throws IOException {
+ private synchronized void updateIOThrottle(OneMerge newMerge, MergeRateLimiter rateLimiter) throws IOException {
if (doAutoIOThrottle == false) {
return;
}
@@ -794,7 +829,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
} else {
rate = targetMBPerSec;
}
- newMerge.rateLimiter.setMBPerSec(rate);
+ rateLimiter.setMBPerSec(rate);
targetMBPerSecChanged();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index da030ca..aa28d99 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -36,6 +36,7 @@ import java.util.Queue;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.codecs.Codec;
@@ -51,22 +52,18 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.LockValidatingDirectoryWrapper;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.store.RateLimitedIndexOutput;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
@@ -277,7 +274,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
private final Directory directoryOrig; // original user directory
private final Directory directory; // wrapped with additional checks
- private final Directory mergeDirectory; // wrapped with throttling: used for merging
private final Analyzer analyzer; // how to analyze text
private final AtomicLong changeCount = new AtomicLong(); // increments every time a change is completed
@@ -353,8 +349,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* card to make sure they can later charge you when you check out. */
final AtomicLong pendingNumDocs = new AtomicLong();
- final CloseableThreadLocal<MergeRateLimiter> rateLimiters = new CloseableThreadLocal<>();
-
DirectoryReader getReader() throws IOException {
return getReader(true, false);
}
@@ -809,10 +803,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
directoryOrig = d;
directory = new LockValidatingDirectoryWrapper(d, writeLock);
- // Directory we use for merging, so we can abort running merges, and so
- // merge schedulers can optionally rate-limit per-merge IO:
- mergeDirectory = addMergeRateLimiters(directory);
-
analyzer = config.getAnalyzer();
mergeScheduler = config.getMergeScheduler();
mergeScheduler.setInfoStream(infoStream);
@@ -2212,8 +2202,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
try {
abortMerges();
- rateLimiters.close();
-
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "rollback: done finish merges");
}
@@ -2418,7 +2406,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "now abort pending merge " + segString(merge.segments));
}
- merge.rateLimiter.setAbort();
+ merge.setAborted();
mergeFinish(merge);
}
pendingMerges.clear();
@@ -2427,7 +2415,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "now abort running merge " + segString(merge.segments));
}
- merge.rateLimiter.setAbort();
+ merge.setAborted();
}
// We wait here to make all merges stop. It should not
@@ -2775,13 +2763,17 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* index.
*
* <p>
- * <b>NOTE:</b> this method merges all given {@link LeafReader}s in one
+ * <b>NOTE:</b> this merges all given {@link LeafReader}s in one
* merge. If you intend to merge a large number of readers, it may be better
* to call this method multiple times, each time with a small set of readers.
* In principle, if you use a merge policy with a {@code mergeFactor} or
* {@code maxMergeAtOnce} parameter, you should pass that many readers in one
* call.
*
+ * <p>
+ * <b>NOTE:</b> this method does not call or make use of the {@link MergeScheduler},
+ * so any custom bandwidth throttling is currently ignored.
+ *
* @return The <a href="#sequence_number">sequence number</a>
* for this operation
*
@@ -2832,8 +2824,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
SegmentMerger merger = new SegmentMerger(Arrays.asList(readers), info, infoStream, trackingDir,
globalFieldNumberMap,
context);
-
- rateLimiters.set(new MergeRateLimiter(null));
if (!merger.shouldMerge()) {
return docWriter.deleteQueue.getNextSequenceNumber();
@@ -2864,7 +2854,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// Now create the compound file if needed
if (useCompoundFile) {
Collection<String> filesToDelete = infoPerCommit.files();
- TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(mergeDirectory);
+ TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(directory);
// TODO: unlike merge, on exception we arent sniping any trash cfs files here?
// createCompoundFile tries to cleanup, but it might not always be able to...
try {
@@ -3745,7 +3735,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// deleter.refresh() call that will remove any index
// file that current segments does not reference), we
// abort this merge
- if (merge.rateLimiter.getAbort()) {
+ if (merge.isAborted()) {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "commitMerge: skip: it was aborted");
}
@@ -3905,8 +3895,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
boolean success = false;
- rateLimiters.set(merge.rateLimiter);
-
final long t0 = System.currentTimeMillis();
final MergePolicy mergePolicy = config.getMergePolicy();
@@ -3937,7 +3925,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "hit exception during merge");
}
- } else if (merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS || (!closed && !closing))) {
+ } else if (!merge.isAborted() && (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS || (!closed && !closing))) {
// This merge (and, generally, any change to the
// segments) may now enable new merges, so we call
// merge policy & update pending merges.
@@ -3951,7 +3939,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
tragicEvent(t, "merge");
}
- if (merge.info != null && merge.rateLimiter.getAbort() == false) {
+ if (merge.info != null && merge.isAborted() == false) {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.info.maxDoc() + " docs");
}
@@ -3976,7 +3964,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
assert merge.segments.size() > 0;
if (stopMerges) {
- merge.rateLimiter.setAbort();
+ merge.setAborted();
throw new MergePolicy.MergeAbortedException("merge is aborted: " + segString(merge.segments));
}
@@ -4087,7 +4075,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
return;
}
- if (merge.rateLimiter.getAbort()) {
+ merge.mergeInit();
+
+ if (merge.isAborted()) {
return;
}
@@ -4239,9 +4229,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* but without holding synchronized lock on IndexWriter
* instance */
private int mergeMiddle(MergePolicy.OneMerge merge, MergePolicy mergePolicy) throws IOException {
+ merge.checkAborted();
- merge.rateLimiter.checkAbort();
-
+ Directory mergeDirectory = config.getMergeScheduler().wrapForMerge(merge, directory);
List<SegmentCommitInfo> sourceSegments = merge.segments;
IOContext context = new IOContext(merge.getStoreMergeInfo());
@@ -4339,7 +4329,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
globalFieldNumberMap,
context);
- merge.rateLimiter.checkAbort();
+ merge.checkAborted();
merge.mergeStartNS = System.nanoTime();
@@ -4354,11 +4344,20 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
if (merger.shouldMerge()) {
+ String pauseInfo = merge.getMergeProgress().getPauseTimes().entrySet()
+ .stream()
+ .filter((e) -> e.getValue() > 0)
+ .map((e) -> String.format(Locale.ROOT, "%.1f sec %s",
+ e.getValue() / 1000000000.,
+ e.getKey().name().toLowerCase(Locale.ROOT)))
+ .collect(Collectors.joining(", "));
+ if (!pauseInfo.isEmpty()) {
+ pauseInfo = " (" + pauseInfo + ")";
+ }
+
long t1 = System.nanoTime();
double sec = (t1-merge.mergeStartNS)/1000000000.;
double segmentMB = (merge.info.sizeInBytes()/1024./1024.);
- double stoppedSec = merge.rateLimiter.getTotalStoppedNS()/1000000000.;
- double throttleSec = merge.rateLimiter.getTotalPausedNS()/1000000000.;
infoStream.message("IW", "merge codec=" + codec + " maxDoc=" + merge.info.info.maxDoc() + "; merged segment has " +
(mergeState.mergeFieldInfos.hasVectors() ? "vectors" : "no vectors") + "; " +
(mergeState.mergeFieldInfos.hasNorms() ? "norms" : "no norms") + "; " +
@@ -4367,10 +4366,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
(mergeState.mergeFieldInfos.hasProx() ? "freqs" : "no freqs") + "; " +
(mergeState.mergeFieldInfos.hasPointValues() ? "points" : "no points") + "; " +
String.format(Locale.ROOT,
- "%.1f sec (%.1f sec stopped, %.1f sec paused) to merge segment [%.2f MB, %.2f MB/sec]",
+ "%.1f sec%s to merge segment [%.2f MB, %.2f MB/sec]",
sec,
- stoppedSec,
- throttleSec,
+ pauseInfo,
segmentMB,
segmentMB / sec));
} else {
@@ -4406,7 +4404,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
success = true;
} catch (Throwable t) {
synchronized(this) {
- if (merge.rateLimiter.getAbort()) {
+ if (merge.isAborted()) {
// This can happen if rollback is called while we were building
// our CFS -- fall through to logic below to remove the non-CFS
// merged files:
@@ -4439,7 +4437,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// registered with IFD
deleteNewFiles(filesToRemove);
- if (merge.rateLimiter.getAbort()) {
+ if (merge.isAborted()) {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "abort merge after building CFS");
}
@@ -5063,30 +5061,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
throw new IllegalArgumentException("number of documents in the index cannot exceed " + actualMaxDocs + " (current document count is " + pendingNumDocs.get() + "; added numDocs is " + addedNumDocs + ")");
}
- /** Wraps the incoming {@link Directory} so that we assign a per-thread
- * {@link MergeRateLimiter} to all created {@link IndexOutput}s. */
- private Directory addMergeRateLimiters(Directory in) {
- return new FilterDirectory(in) {
- @Override
- public IndexOutput createOutput(String name, IOContext context) throws IOException {
- ensureOpen();
-
- // Paranoia defense: if this trips we have a bug somewhere...
- IndexWriter.this.ensureOpen(false);
-
- // This Directory is only supposed to be used during merging,
- // so all writes should have MERGE context, else there is a bug
- // somewhere that is failing to pass down the right IOContext:
- assert context.context == IOContext.Context.MERGE: "got context=" + context.context;
-
- MergeRateLimiter rateLimiter = rateLimiters.get();
- assert rateLimiter != null;
-
- return new RateLimitedIndexOutput(rateLimiter, in.createOutput(name, context));
- }
- };
- }
-
/** Returns the highest <a href="#sequence_number">sequence number</a> across
* all completed operations, or 0 if no operations have finished yet. Still
* in-flight operations (in other threads) are not counted until they finish.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
index dbf37df..d9a0ab8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
@@ -19,12 +19,19 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.EnumMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.BooleanSupplier;
+import java.util.stream.Collectors;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.store.RateLimiter;
/**
* <p>Expert: a MergePolicy determines the sequence of
@@ -55,6 +62,125 @@ import org.apache.lucene.store.RateLimiter;
* @lucene.experimental
*/
public abstract class MergePolicy {
+ /**
+ * Progress and state for an executing merge. This class
+ * encapsulates the logic to pause and resume the merge thread
+ * or to abort the merge entirely.
+ *
+ * @lucene.experimental */
+ public static class OneMergeProgress {
+ /** Reason for pausing the merge thread. */
+ public static enum PauseReason {
+ /** Stopped (because of throughput rate set to 0, typically). */
+ STOPPED,
+ /** Temporarily paused because of exceeded throughput rate. */
+ PAUSED,
+ /** Other reason. */
+ OTHER
+ };
+
+ private final ReentrantLock pauseLock = new ReentrantLock();
+ private final Condition pausing = pauseLock.newCondition();
+
+ /**
+ * Pause times (in nanoseconds) for each {@link PauseReason}.
+ */
+ private final EnumMap<PauseReason, AtomicLong> pauseTimesNS;
+
+ private volatile boolean aborted;
+
+ /**
+ * This field is for sanity-check purposes only. Only the same thread that invoked
+ * {@link OneMerge#mergeInit()} is permitted to be calling
+ * {@link #pauseNanos}. This is always verified at runtime.
+ */
+ private Thread owner;
+
+ /** Creates a new merge progress info. */
+ public OneMergeProgress() {
+ // Place all the pause reasons in there immediately so that we can simply update values.
+ pauseTimesNS = new EnumMap<PauseReason,AtomicLong>(PauseReason.class);
+ for (PauseReason p : PauseReason.values()) {
+ pauseTimesNS.put(p, new AtomicLong());
+ }
+ }
+
+ /**
+ * Abort the merge this progress tracks at the next
+ * possible moment.
+ */
+ public void abort() {
+ aborted = true;
+ wakeup(); // wakeup any paused merge thread.
+ }
+
+ /**
+ * Return the aborted state of this merge.
+ */
+ public boolean isAborted() {
+ return aborted;
+ }
+
+ /**
+ * Pauses the calling thread for at least <code>pauseNanos</code> nanoseconds
+ * unless the merge is aborted or the external condition returns <code>false</code>,
+ * in which case control returns immediately.
+ *
+ * The external condition is required so that other threads can terminate the pausing immediately,
+ * before <code>pauseNanos</code> expires. We can't rely on just {@link Condition#awaitNanos(long)} alone
+ * because it can return due to spurious wakeups too.
+ *
+ * @param condition The pause condition that should return false if immediate return from this
+ * method is needed. Other threads can wake up any sleeping thread by calling
+ * {@link #wakeup}, but it'd fall back to sleep for the remainder of the requested time if this
+ * condition still returns {@code true}.
+ */
+ public void pauseNanos(long pauseNanos, PauseReason reason, BooleanSupplier condition) throws InterruptedException {
+ if (Thread.currentThread() != owner) {
+ throw new RuntimeException("Only the merge owner thread can call pauseNanos(). This thread: "
+ + Thread.currentThread().getName() + ", owner thread: "
+ + owner);
+ }
+
+ long start = System.nanoTime();
+ AtomicLong timeUpdate = pauseTimesNS.get(reason);
+ pauseLock.lock();
+ try {
+ while (pauseNanos > 0 && !aborted && condition.getAsBoolean()) {
+ pauseNanos = pausing.awaitNanos(pauseNanos);
+ }
+ } finally {
+ pauseLock.unlock();
+ timeUpdate.addAndGet(System.nanoTime() - start);
+ }
+ }
+
+ /**
+ * Request a wakeup for any threads stalled in {@link #pauseNanos}.
+ */
+ public void wakeup() {
+ pauseLock.lock();
+ try {
+ pausing.signalAll();
+ } finally {
+ pauseLock.unlock();
+ }
+ }
+
+ /** Returns pause reasons and associated times in nanoseconds. */
+ public Map<PauseReason,Long> getPauseTimes() {
+ Set<Entry<PauseReason,AtomicLong>> entries = pauseTimesNS.entrySet();
+ return entries.stream()
+ .collect(Collectors.toMap(
+ (e) -> e.getKey(),
+ (e) -> e.getValue().get()));
+ }
+
+ final void setMergeThread(Thread owner) {
+ assert this.owner == null;
+ this.owner = owner;
+ }
+ }
/** OneMerge provides the information necessary to perform
* an individual primitive merge operation, resulting in
@@ -64,7 +190,6 @@ public abstract class MergePolicy {
*
* @lucene.experimental */
public static class OneMerge {
-
SegmentCommitInfo info; // used by IndexWriter
boolean registerDone; // used by IndexWriter
long mergeGen; // used by IndexWriter
@@ -82,8 +207,10 @@ public abstract class MergePolicy {
/** Segments to be merged. */
public final List<SegmentCommitInfo> segments;
- /** A private {@link RateLimiter} for this merge, used to rate limit writes and abort. */
- public final MergeRateLimiter rateLimiter;
+ /**
+ * Control used to pause/stop/resume the merge thread.
+ */
+ private final OneMergeProgress mergeProgress;
volatile long mergeStartNS = -1;
@@ -106,9 +233,17 @@ public abstract class MergePolicy {
}
totalMaxDoc = count;
- rateLimiter = new MergeRateLimiter(this);
+ mergeProgress = new OneMergeProgress();
}
+ /**
+ * Called by {@link IndexWriter} after the merge started and from the
+ * thread that will be executing the merge.
+ */
+ public void mergeInit() throws IOException {
+ mergeProgress.setMergeThread(Thread.currentThread());
+ }
+
/** Called by {@link IndexWriter} after the merge is done and all readers have been closed. */
public void mergeFinished() throws IOException {
}
@@ -163,7 +298,7 @@ public abstract class MergePolicy {
if (maxNumSegments != -1) {
b.append(" [maxNumSegments=" + maxNumSegments + "]");
}
- if (rateLimiter.getAbort()) {
+ if (isAborted()) {
b.append(" [ABORTED]");
}
return b.toString();
@@ -194,7 +329,32 @@ public abstract class MergePolicy {
/** Return {@link MergeInfo} describing this merge. */
public MergeInfo getStoreMergeInfo() {
return new MergeInfo(totalMaxDoc, estimatedMergeBytes, isExternal, maxNumSegments);
- }
+ }
+
+ /** Returns true if this merge was or should be aborted. */
+ public boolean isAborted() {
+ return mergeProgress.isAborted();
+ }
+
+ /** Marks this merge as aborted. The merge thread should terminate at the soonest possible moment. */
+ public void setAborted() {
+ this.mergeProgress.abort();
+ }
+
+ /** Checks if merge has been aborted and throws a merge exception if so. */
+ public void checkAborted() throws MergeAbortedException {
+ if (isAborted()) {
+ throw new MergePolicy.MergeAbortedException("merge is aborted: " + segString());
+ }
+ }
+
+ /**
+ * Returns a {@link OneMergeProgress} instance for this merge, which provides
+ * statistics of the merge threads (run time vs. sleep time) if merging is throttled.
+ */
+ public OneMergeProgress getMergeProgress() {
+ return mergeProgress;
+ }
}
/**
@@ -222,8 +382,7 @@ public abstract class MergePolicy {
merges.add(merge);
}
- /** Returns a description of the merges in this
- * specification. */
+ /** Returns a description of the merges in this specification. */
public String segString(Directory dir) {
StringBuilder b = new StringBuilder();
b.append("MergeSpec:\n");
@@ -235,8 +394,7 @@ public abstract class MergePolicy {
}
}
- /** Exception thrown if there are any problems while
- * executing a merge. */
+ /** Exception thrown if there are any problems while executing a merge. */
public static class MergeException extends RuntimeException {
private Directory dir;
@@ -259,9 +417,9 @@ public abstract class MergePolicy {
}
}
- /** Thrown when a merge was explicity aborted because
+ /** Thrown when a merge was explicitly aborted because
* {@link IndexWriter#abortMerges} was called. Normally
- * this exception is privately caught and suppresed by
+ * this exception is privately caught and suppressed by
* {@link IndexWriter}. */
public static class MergeAbortedException extends IOException {
/** Create a {@link MergeAbortedException}. */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java b/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
index d04c2d2..e5361d5 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
@@ -20,118 +20,107 @@ package org.apache.lucene.index;
import org.apache.lucene.store.RateLimiter;
import org.apache.lucene.util.ThreadInterruptedException;
-import static org.apache.lucene.store.RateLimiter.SimpleRateLimiter;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.lucene.index.MergePolicy.OneMergeProgress;
+import org.apache.lucene.index.MergePolicy.OneMergeProgress.PauseReason;
/** This is the {@link RateLimiter} that {@link IndexWriter} assigns to each running merge, to
* give {@link MergeScheduler}s ionice like control.
*
- * This is similar to {@link SimpleRateLimiter}, except it's merge-private,
- * it will wake up if its rate changes while it's paused, it tracks how
- * much time it spent stopped and paused, and it supports aborting.
- *
* @lucene.internal */
public class MergeRateLimiter extends RateLimiter {
private final static int MIN_PAUSE_CHECK_MSEC = 25;
- volatile long totalBytesWritten;
+
+ private final static long MIN_PAUSE_NS = TimeUnit.MILLISECONDS.toNanos(2);
+ private final static long MAX_PAUSE_NS = TimeUnit.MILLISECONDS.toNanos(250);
+
+ private volatile double mbPerSec;
+ private volatile long minPauseCheckBytes;
- double mbPerSec;
private long lastNS;
- private long minPauseCheckBytes;
- private boolean abort;
- long totalPausedNS;
- long totalStoppedNS;
- final MergePolicy.OneMerge merge;
- /** Returned by {@link #maybePause}. */
- private static enum PauseResult {NO, STOPPED, PAUSED};
+ private AtomicLong totalBytesWritten = new AtomicLong();
- /** Sole constructor. */
- public MergeRateLimiter(MergePolicy.OneMerge merge) {
- this.merge = merge;
+ private final OneMergeProgress mergeProgress;
+ /** Sole constructor. */
+ public MergeRateLimiter(OneMergeProgress mergeProgress) {
// Initially no IO limit; use setter here so minPauseCheckBytes is set:
+ this.mergeProgress = mergeProgress;
setMBPerSec(Double.POSITIVE_INFINITY);
}
@Override
- public synchronized void setMBPerSec(double mbPerSec) {
- // 0.0 is allowed: it means the merge is paused
- if (mbPerSec < 0.0) {
- throw new IllegalArgumentException("mbPerSec must be positive; got: " + mbPerSec);
+ public void setMBPerSec(double mbPerSec) {
+ // Synchronized to make updates to mbPerSec and minPauseCheckBytes atomic.
+ synchronized (this) {
+ // 0.0 is allowed: it means the merge is paused
+ if (mbPerSec < 0.0) {
+ throw new IllegalArgumentException("mbPerSec must be positive; got: " + mbPerSec);
+ }
+ this.mbPerSec = mbPerSec;
+
+ // NOTE: Double.POSITIVE_INFINITY casts to Long.MAX_VALUE
+ this.minPauseCheckBytes = Math.min(1024*1024, (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024));
+ assert minPauseCheckBytes >= 0;
}
- this.mbPerSec = mbPerSec;
- // NOTE: Double.POSITIVE_INFINITY casts to Long.MAX_VALUE
- minPauseCheckBytes = Math.min(1024*1024, (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024));
- assert minPauseCheckBytes >= 0;
- notify();
+
+ mergeProgress.wakeup();
}
@Override
- public synchronized double getMBPerSec() {
+ public double getMBPerSec() {
return mbPerSec;
}
/** Returns total bytes written by this merge. */
public long getTotalBytesWritten() {
- return totalBytesWritten;
+ return totalBytesWritten.get();
}
@Override
public long pause(long bytes) throws MergePolicy.MergeAbortedException {
+ totalBytesWritten.addAndGet(bytes);
- totalBytesWritten += bytes;
-
- long startNS = System.nanoTime();
- long curNS = startNS;
-
- // While loop because 1) Thread.wait doesn't always sleep long
- // enough, and 2) we wake up and check again when our rate limit
+ // While loop because we may wake up and check again when our rate limit
// is changed while we were pausing:
- long pausedNS = 0;
- while (true) {
- PauseResult result = maybePause(bytes, curNS);
- if (result == PauseResult.NO) {
- // Set to curNS, not targetNS, to enforce the instant rate, not
- // the "averaaged over all history" rate:
- lastNS = curNS;
- break;
- }
- curNS = System.nanoTime();
- long ns = curNS - startNS;
- startNS = curNS;
-
- // Separately track when merge was stopped vs rate limited:
- if (result == PauseResult.STOPPED) {
- totalStoppedNS += ns;
- } else {
- assert result == PauseResult.PAUSED;
- totalPausedNS += ns;
- }
- pausedNS += ns;
+ long paused = 0;
+ long delta;
+ while ((delta = maybePause(bytes, System.nanoTime())) >= 0) {
+ // Keep waiting.
+ paused += delta;
}
- return pausedNS;
+ return paused;
}
/** Total NS merge was stopped. */
- public synchronized long getTotalStoppedNS() {
- return totalStoppedNS;
+ public long getTotalStoppedNS() {
+ return mergeProgress.getPauseTimes().get(PauseReason.STOPPED);
}
/** Total NS merge was paused to rate limit IO. */
- public synchronized long getTotalPausedNS() {
- return totalPausedNS;
+ public long getTotalPausedNS() {
+ return mergeProgress.getPauseTimes().get(PauseReason.PAUSED);
}
- /** Returns NO if no pause happened, STOPPED if pause because rate was 0.0 (merge is stopped), PAUSED if paused with a normal rate limit. */
- private synchronized PauseResult maybePause(long bytes, long curNS) throws MergePolicy.MergeAbortedException {
-
+ /**
+ * Returns the number of nanoseconds spent in a paused state or <code>-1</code>
+ * if no pause was applied. If the thread needs pausing, this method delegates
+ * to the linked {@link OneMergeProgress}.
+ */
+ private long maybePause(long bytes, long curNS) throws MergePolicy.MergeAbortedException {
// Now is a good time to abort the merge:
- checkAbort();
+ if (mergeProgress.isAborted()) {
+ throw new MergePolicy.MergeAbortedException("Merge aborted.");
+ }
- double secondsToPause = (bytes/1024./1024.) / mbPerSec;
+ double rate = mbPerSec; // read from volatile rate once.
+ double secondsToPause = (bytes/1024./1024.) / rate;
// Time we should sleep until; this is purely instantaneous
// rate (just adds seconds onto the last time we had paused to);
@@ -140,54 +129,30 @@ public class MergeRateLimiter extends RateLimiter {
long curPauseNS = targetNS - curNS;
- // NOTE: except maybe on real-time JVMs, minimum realistic
- // wait/sleep time is 1 msec; if you pass just 1 nsec the impl
- // rounds up to 1 msec, so we don't bother unless it's > 2 msec:
-
- if (curPauseNS <= 2000000) {
- return PauseResult.NO;
+ // We don't bother with thread pausing if the pause is smaller than 2 msec.
+ if (curPauseNS <= MIN_PAUSE_NS) {
+ // Set to curNS, not targetNS, to enforce the instant rate, not
+ // the "averaged over all history" rate:
+ lastNS = curNS;
+ return -1;
}
- // Defensive: sleep for at most 250 msec; the loop above will call us again if we should keep sleeping:
- if (curPauseNS > 250L*1000000) {
- curPauseNS = 250L*1000000;
+ // Defensive: don't sleep for too long; the loop above will call us again if
+ // we should keep sleeping and the rate may be adjusted in between.
+ if (curPauseNS > MAX_PAUSE_NS) {
+ curPauseNS = MAX_PAUSE_NS;
}
- int sleepMS = (int) (curPauseNS / 1000000);
- int sleepNS = (int) (curPauseNS % 1000000);
-
- double rate = mbPerSec;
-
+ long start = System.nanoTime();
try {
- // CMS can wake us up here if it changes our target rate:
- wait(sleepMS, sleepNS);
+ mergeProgress.pauseNanos(
+ curPauseNS,
+ rate == 0.0 ? PauseReason.STOPPED : PauseReason.PAUSED,
+ () -> rate == mbPerSec);
} catch (InterruptedException ie) {
throw new ThreadInterruptedException(ie);
}
-
- if (rate == 0.0) {
- return PauseResult.STOPPED;
- } else {
- return PauseResult.PAUSED;
- }
- }
-
- /** Throws {@link MergePolicy.MergeAbortedException} if this merge was aborted. */
- public synchronized void checkAbort() throws MergePolicy.MergeAbortedException {
- if (abort) {
- throw new MergePolicy.MergeAbortedException("merge is aborted: " + merge.segString());
- }
- }
-
- /** Mark this merge aborted. */
- public synchronized void setAbort() {
- abort = true;
- notify();
- }
-
- /** Returns true if this merge was aborted. */
- public synchronized boolean getAbort() {
- return abort;
+ return System.nanoTime() - start;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
index 65af45b..66d0870 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
@@ -20,6 +20,9 @@ package org.apache.lucene.index;
import java.io.Closeable;
import java.io.IOException;
+import org.apache.lucene.index.MergePolicy.OneMerge;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RateLimitedIndexOutput;
import org.apache.lucene.util.InfoStream;
/** <p>Expert: {@link IndexWriter} uses an instance
@@ -42,6 +45,15 @@ public abstract class MergeScheduler implements Closeable {
* */
public abstract void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException;
+ /**
+ * Wraps the incoming {@link Directory} so that we can merge-throttle it
+ * using {@link RateLimitedIndexOutput}.
+ */
+ public Directory wrapForMerge(OneMerge merge, Directory in) {
+ // A no-op by default.
+ return in;
+ }
+
/** Close this MergeScheduler. */
@Override
public abstract void close() throws IOException;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
index 1630653..e4c0136 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
@@ -16,6 +16,8 @@
*/
package org.apache.lucene.index;
+import org.apache.lucene.index.MergePolicy.OneMerge;
+import org.apache.lucene.store.Directory;
/**
* A {@link MergeScheduler} which never executes any merges. It is also a
@@ -41,6 +43,11 @@ public final class NoMergeScheduler extends MergeScheduler {
@Override
public void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) {}
+
+ @Override
+ public Directory wrapForMerge(OneMerge merge, Directory in) {
+ return in;
+ }
@Override
public MergeScheduler clone() {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9540bc37/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java b/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
index ef922bb..723cfbc 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
@@ -27,8 +27,8 @@ public class TestMergeRateLimiter extends LuceneTestCase {
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
w.addDocument(new Document());
w.close();
- MergePolicy.OneMerge merge = new MergePolicy.OneMerge(SegmentInfos.readLatestCommit(dir).asList());
- MergeRateLimiter rateLimiter = new MergeRateLimiter(merge);
+
+ MergeRateLimiter rateLimiter = new MergeRateLimiter(new MergePolicy.OneMergeProgress());
assertEquals(Double.POSITIVE_INFINITY, rateLimiter.getMBPerSec(), 0.0);
assertTrue(rateLimiter.getMinPauseCheckBytes() > 0);
dir.close();
[31/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7718:
buildAndPushRelease.py script should refer to working tree instead of
directory
Posted by is...@apache.org.
LUCENE-7718: buildAndPushRelease.py script should refer to working tree instead of directory
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a6e14ec6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a6e14ec6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a6e14ec6
Branch: refs/heads/jira/solr-6736
Commit: a6e14ec6d2d176f8363efc46b0685fda9a0942b2
Parents: 3a99339
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Tue Feb 28 21:22:17 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 01:49:13 2017 +0530
----------------------------------------------------------------------
dev-tools/scripts/buildAndPushRelease.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a6e14ec6/dev-tools/scripts/buildAndPushRelease.py
----------------------------------------------------------------------
diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py
index e34c943..d742214 100644
--- a/dev-tools/scripts/buildAndPushRelease.py
+++ b/dev-tools/scripts/buildAndPushRelease.py
@@ -59,7 +59,7 @@ def runAndSendGPGPassword(command, password):
def getGitRev():
status = os.popen('git status').read().strip()
- if 'nothing to commit, working directory clean' not in status:
+ if 'nothing to commit, working directory clean' not in status and 'nothing to commit, working tree clean' not in status:
raise RuntimeError('git clone is dirty:\n\n%s' % status)
branch = os.popen('git rev-parse --abbrev-ref HEAD').read().strip()
command = 'git log origin/%s..' % branch
[25/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7712:
SimpleQueryParser now parses foo~ as foo~2
Posted by is...@apache.org.
LUCENE-7712: SimpleQueryParser now parses foo~ as foo~2
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/21559fe8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/21559fe8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/21559fe8
Branch: refs/heads/jira/solr-6736
Commit: 21559fe86da5e84c75c25b8373f6c78f1ac75a8f
Parents: 7af6cc9
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Mar 7 06:55:47 2017 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Mar 7 06:55:47 2017 -0500
----------------------------------------------------------------------
lucene/CHANGES.txt | 4 ++++
.../apache/lucene/queryparser/simple/SimpleQueryParser.java | 8 +++++++-
.../lucene/queryparser/simple/TestSimpleQueryParser.java | 2 +-
3 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index a339760..b067fde 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -150,6 +150,10 @@ New Features
efficiently filter out duplicate suggestions (Uwe Schindler, Mike
McCandless)
+* LUCENE-7712: SimpleQueryParser now supports default fuzziness
+ syntax, mapping foo~ to a FuzzyQuery with edit distance 2. (Lee
+ Hinman, David Pilato via Mike McCandless)
+
Bug Fixes
* LUCENE-7630: Fix (Edge)NGramTokenFilter to no longer drop payloads
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
index 6e05aec..2a7f7ea 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
@@ -498,7 +498,13 @@ public class SimpleQueryParser extends QueryBuilder {
}
int fuzziness = 0;
try {
- fuzziness = Integer.parseInt(new String(slopText, 0, slopLength));
+ String fuzzyString = new String(slopText, 0, slopLength);
+ if ("".equals(fuzzyString)) {
+ // Use automatic fuzziness, ~2
+ fuzziness = 2;
+ } else {
+ fuzziness = Integer.parseInt(fuzzyString);
+ }
} catch (NumberFormatException e) {
// swallow number format exceptions parsing fuzziness
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
index d70a22c..169caa2 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
@@ -88,7 +88,7 @@ public class TestSimpleQueryParser extends LuceneTestCase {
Query expected = new FuzzyQuery(new Term("field", "foobar"), 2);
assertEquals(expected, parse("foobar~2"));
- assertEquals(regular, parse("foobar~"));
+ assertEquals(expected, parse("foobar~"));
assertEquals(regular, parse("foobar~a"));
assertEquals(regular, parse("foobar~1a"));
[42/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10257: Add logarithm
StreamEvaluator
Posted by is...@apache.org.
SOLR-10257: Add logarithm StreamEvaluator
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d945a246
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d945a246
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d945a246
Branch: refs/heads/jira/solr-6736
Commit: d945a246f6071699790119f07a66fb4c5505cee2
Parents: c680f45
Author: Joel Bernstein <jb...@apache.org>
Authored: Thu Mar 9 20:48:14 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Thu Mar 9 21:01:24 2017 -0500
----------------------------------------------------------------------
.../org/apache/solr/handler/StreamHandler.java | 3 +-
.../solrj/io/eval/NaturalLogEvaluator.java | 60 ++++++++++++
.../io/stream/eval/NaturalLogEvaluatorTest.java | 98 ++++++++++++++++++++
3 files changed, 160 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d945a246/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 06e59b6..e69f52b 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -42,6 +42,7 @@ import org.apache.solr.client.solrj.io.eval.IfThenElseEvaluator;
import org.apache.solr.client.solrj.io.eval.LessThanEqualToEvaluator;
import org.apache.solr.client.solrj.io.eval.LessThanEvaluator;
import org.apache.solr.client.solrj.io.eval.MultiplyEvaluator;
+import org.apache.solr.client.solrj.io.eval.NaturalLogEvaluator;
import org.apache.solr.client.solrj.io.eval.NotEvaluator;
import org.apache.solr.client.solrj.io.eval.OrEvaluator;
import org.apache.solr.client.solrj.io.eval.RawValueEvaluator;
@@ -197,7 +198,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("div", DivideEvaluator.class)
.withFunctionName("mult", MultiplyEvaluator.class)
.withFunctionName("sub", SubtractEvaluator.class)
-
+ .withFunctionName("log", NaturalLogEvaluator.class)
// Conditional Stream Evaluators
.withFunctionName("if", IfThenElseEvaluator.class)
;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d945a246/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalLogEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalLogEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalLogEvaluator.java
new file mode 100644
index 0000000..19709e6
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NaturalLogEvaluator.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ *
+ */
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class NaturalLogEvaluator extends NumberEvaluator {
+ protected static final long serialVersionUID = 1L;
+
+ public NaturalLogEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{
+ super(expression, factory);
+
+ if(1 != subEvaluators.size()){
+ throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting one value but found %d",expression,subEvaluators.size()));
+ }
+ }
+
+ @Override
+ public Number evaluate(Tuple tuple) throws IOException {
+
+ List<BigDecimal> results = evaluateAll(tuple);
+
+ // we're still doing these checks because if we ever add an array-flatten evaluator,
+ // one found in the constructor could become != 1
+ if(1 != results.size()){
+ throw new IOException(String.format(Locale.ROOT,"%s(...) only works with a 1 value but %d were provided", constructingFactory.getFunctionName(getClass()), results.size()));
+ }
+
+ if(null == results.get(0)){
+ return null;
+ }
+
+ return Math.log(results.get(0).doubleValue());
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d945a246/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
new file mode 100644
index 0000000..c4ae127
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream.eval;
+
+import java.io.IOException;
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.eval.AddEvaluator;
+import org.apache.solr.client.solrj.io.eval.NaturalLogEvaluator;
+import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.junit.Test;
+
+public class NaturalLogEvaluatorTest extends LuceneTestCase {
+
+ StreamFactory factory;
+ Map<String, Object> values;
+
+ public NaturalLogEvaluatorTest() {
+ super();
+
+ factory = new StreamFactory()
+ .withFunctionName("log", NaturalLogEvaluator.class).withFunctionName("add", AddEvaluator.class);
+ values = new HashedMap();
+ }
+
+ @Test
+ public void logOneField() throws Exception{
+ StreamEvaluator evaluator = factory.constructEvaluator("log(a)");
+ Object result;
+
+ values.clear();
+ values.put("a", 100);
+ result = evaluator.evaluate(new Tuple(values));
+ Assert.assertTrue(result instanceof Double);
+ Assert.assertTrue(result.equals(Math.log(100)));
+
+ }
+
+ @Test
+ public void logNestedField() throws Exception{
+ StreamEvaluator evaluator = factory.constructEvaluator("log(add(50,50))");
+ Object result;
+
+ values.clear();
+ result = evaluator.evaluate(new Tuple(values));
+ Assert.assertTrue(result instanceof Double);
+ Assert.assertTrue(result.equals(Math.log(100)));
+
+ }
+
+ @Test(expected = IOException.class)
+ public void logNoField() throws Exception{
+ factory.constructEvaluator("log()");
+ }
+
+ @Test(expected = IOException.class)
+ public void logTwoFields() throws Exception{
+ factory.constructEvaluator("log(a,b)");
+ }
+
+ @Test
+ public void logNoValue() throws Exception{
+ StreamEvaluator evaluator = factory.constructEvaluator("log(a)");
+
+ values.clear();
+ Object result = evaluator.evaluate(new Tuple(values));
+ assertNull(result);
+ }
+ @Test
+ public void logNullValue() throws Exception{
+ StreamEvaluator evaluator = factory.constructEvaluator("log(a)");
+
+ values.clear();
+ values.put("a", null);
+ Object result = evaluator.evaluate(new Tuple(values));
+ assertNull(result);
+ }
+}
[49/50] [abbrv] lucene-solr:jira/solr-6736: Merge branch 'master'
into jira/solr-6736
Posted by is...@apache.org.
Merge branch 'master' into jira/solr-6736
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/405da79c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/405da79c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/405da79c
Branch: refs/heads/jira/solr-6736
Commit: 405da79c8dfbff846cd9d7d51f21656e27a105a0
Parents: 10c2beb a3f4896
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Sun Mar 12 05:40:50 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Sun Mar 12 05:40:50 2017 +0530
----------------------------------------------------------------------
dev-tools/scripts/smokeTestRelease.py | 4 +
lucene/CHANGES.txt | 12 +
.../org/apache/lucene/document/FieldType.java | 6 +-
.../lucene/index/ConcurrentMergeScheduler.java | 75 ++++--
.../org/apache/lucene/index/IndexWriter.java | 94 +++-----
.../org/apache/lucene/index/MergePolicy.java | 184 +++++++++++++--
.../apache/lucene/index/MergeRateLimiter.java | 177 ++++++--------
.../org/apache/lucene/index/MergeScheduler.java | 12 +
.../apache/lucene/index/NoMergeScheduler.java | 7 +
.../org/apache/lucene/search/BooleanQuery.java | 1 -
.../lucene/index/TestMergeRateLimiter.java | 4 +-
solr/CHANGES.txt | 2 +
.../java/org/apache/solr/api/V2HttpCall.java | 2 +-
.../java/org/apache/solr/core/PluginBag.java | 2 +-
.../org/apache/solr/handler/StreamHandler.java | 30 ++-
.../apache/solr/servlet/SolrDispatchFilter.java | 2 +-
.../conf/solrconfig-managed-schema.xml | 2 +-
.../org/apache/solr/cloud/rule/RulesTest.java | 2 +-
.../apache/solr/core/TestDynamicLoading.java | 2 +-
.../apache/solr/core/TestSolrConfigHandler.java | 14 +-
.../apache/solr/handler/TestReqParamsAPI.java | 3 +
.../solr/handler/V2ApiIntegrationTest.java | 4 +-
.../solr/rest/schema/TestBulkSchemaAPI.java | 2 +-
.../solr/security/BasicAuthIntegrationTest.java | 4 +-
.../TestRuleBasedAuthorizationPlugin.java | 23 ++
solr/server/etc/jetty.xml | 54 +++--
.../conf/solrconfig.xml | 2 +-
.../solr/client/solrj/impl/CloudSolrClient.java | 4 +-
.../solrj/io/eval/NaturalLogEvaluator.java | 60 +++++
.../solrj/io/stream/SignificantTermsStream.java | 58 +----
.../client/solrj/io/stream/TupleStream.java | 94 ++++++++
.../solrj/embedded/SolrExampleJettyTest.java | 2 +-
.../solrj/io/stream/StreamExpressionTest.java | 234 +++++++++++++------
.../io/stream/eval/NaturalLogEvaluatorTest.java | 98 ++++++++
34 files changed, 897 insertions(+), 379 deletions(-)
----------------------------------------------------------------------
[40/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10254: Fix pre-commit
Posted by is...@apache.org.
SOLR-10254: Fix pre-commit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c85aac2a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c85aac2a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c85aac2a
Branch: refs/heads/jira/solr-6736
Commit: c85aac2a65472d0d80050a703c99844e694c1584
Parents: 682c6a7
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Mar 8 21:35:24 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 8 21:35:24 2017 -0500
----------------------------------------------------------------------
.../solr/client/solrj/io/stream/SignificantTermsStream.java | 9 ---------
1 file changed, 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c85aac2a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 2acee51..101a71d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -19,7 +19,6 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@@ -27,13 +26,10 @@ import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Random;
-import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
@@ -48,11 +44,6 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
[05/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10219: re-enable
HDFS tests under JDK9 (CHANGES.txt entry)
Posted by is...@apache.org.
SOLR-10219: re-enable HDFS tests under JDK9 (CHANGES.txt entry)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/da113fde
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/da113fde
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/da113fde
Branch: refs/heads/jira/solr-6736
Commit: da113fde771adf0b1a6b4676533e8e02cab41f9a
Parents: 8ed2b76
Author: Chris Hostetter <ho...@apache.org>
Authored: Thu Mar 2 14:13:17 2017 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Thu Mar 2 14:13:17 2017 -0700
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/da113fde/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 152b344..09912ee 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -269,6 +269,9 @@ Other Changes
* SOLR-10134: EmbeddedSolrServer responds on Schema API requests (Robert Alexandersson via Mikhail Khludnev)
+* SOLR-10219: re-enable HDFS tests under JDK9 (hossman, Uwe Schindler)
+
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
[11/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-6819: Make
ExtractingRequestHandlerTest not rely on index-time boosts.
Posted by is...@apache.org.
LUCENE-6819: Make ExtractingRequestHandlerTest not rely on index-time boosts.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7453f78b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7453f78b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7453f78b
Branch: refs/heads/jira/solr-6736
Commit: 7453f78b3539c7f4f5fa6e5324b251467ca50644
Parents: efbabf8
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Mar 3 21:43:50 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Mar 3 21:43:50 2017 +0100
----------------------------------------------------------------------
.../solr/handler/extraction/ExtractingRequestHandlerTest.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7453f78b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
index a53bcd9..dc84b51 100644
--- a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
+++ b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
@@ -113,7 +113,7 @@ public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
assertQ(req("+id:simple2 +t_content:serif"), "//*[@numFound='0']"); // make sure <style> content is excluded
assertQ(req("+id:simple2 +t_content:blur"), "//*[@numFound='0']"); // make sure <script> content is excluded
- // load again in the exact same way, but boost one field
+ // make sure the fact there is an index-time boost does not fail the parsing
loadLocal("extraction/simple.html",
"literal.id","simple3",
"uprefix", "t_",
@@ -125,7 +125,7 @@ public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
);
assertQ(req("t_href:http"), "//*[@numFound='2']");
- assertQ(req("t_href:http"), "//doc[1]/str[.='simple3']");
+ assertQ(req("t_href:http"), "//doc[2]/str[.='simple3']");
assertQ(req("+id:simple3 +t_content_type:[* TO *]"), "//*[@numFound='1']");//test lowercase and then uprefix
loadLocal("extraction/version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
[17/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10171: Add Constant
Reduction Rules to Calcite Planner
Posted by is...@apache.org.
SOLR-10171: Add Constant Reduction Rules to Calcite Planner
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/686fbd3b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/686fbd3b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/686fbd3b
Branch: refs/heads/jira/solr-6736
Commit: 686fbd3ba294e7a71f08b2821b5c3ce50235c78b
Parents: b2dcb47
Author: Kevin Risden <kr...@apache.org>
Authored: Sun Feb 19 21:34:40 2017 -0600
Committer: Kevin Risden <kr...@apache.org>
Committed: Sat Mar 4 19:22:34 2017 -0600
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../org/apache/solr/handler/sql/SolrRules.java | 14 ++++++++++++++
.../apache/solr/handler/sql/SolrTableScan.java | 4 ++++
.../org/apache/solr/handler/TestSQLHandler.java | 8 ++++++++
.../solr/client/solrj/io/sql/JdbcTest.java | 19 +++++++++++++++++++
5 files changed, 47 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/686fbd3b/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index bb5a784..5b7eac5 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -285,6 +285,8 @@ Other Changes
* SOLR-10155: For numeric types facet.contains= and facet.prefix= are now rejected.
(Gus Heck, Christine Poerschke)
+* SOLR-10171 Add Constant Reduction Rules to Calcite Planner (Kevin Risden)
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/686fbd3b/solr/core/src/java/org/apache/solr/handler/sql/SolrRules.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrRules.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrRules.java
index 4cbadda..bbc4eb7 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrRules.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrRules.java
@@ -26,6 +26,9 @@ import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.logical.LogicalSort;
+import org.apache.calcite.rel.rules.AggregateValuesRule;
+import org.apache.calcite.rel.rules.ReduceExpressionsRule;
+import org.apache.calcite.rel.rules.ValuesReduceRule;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
@@ -52,6 +55,17 @@ class SolrRules {
SolrAggregateRule.AGGREGATE_RULE,
};
+ static final RelOptRule[] CONSTANT_REDUCTION_RULES = {
+ ReduceExpressionsRule.PROJECT_INSTANCE,
+ ReduceExpressionsRule.FILTER_INSTANCE,
+ ReduceExpressionsRule.CALC_INSTANCE,
+ ReduceExpressionsRule.JOIN_INSTANCE,
+ ValuesReduceRule.FILTER_INSTANCE,
+ ValuesReduceRule.PROJECT_FILTER_INSTANCE,
+ ValuesReduceRule.PROJECT_INSTANCE,
+ AggregateValuesRule.INSTANCE
+ };
+
static List<String> solrFieldNames(final RelDataType rowType) {
return SqlValidatorUtil.uniquify(
new AbstractList<String>() {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/686fbd3b/solr/core/src/java/org/apache/solr/handler/sql/SolrTableScan.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTableScan.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTableScan.java
index 88c53ac..5363d59 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTableScan.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTableScan.java
@@ -72,6 +72,10 @@ class SolrTableScan extends TableScan implements SolrRel {
for (RelOptRule rule : SolrRules.RULES) {
planner.addRule(rule);
}
+
+ for (RelOptRule rule : SolrRules.CONSTANT_REDUCTION_RULES) {
+ planner.addRule(rule);
+ }
}
public void implement(Implementor implementor) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/686fbd3b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
index 35f7ad0..d724fbd 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
@@ -317,6 +317,14 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
assert(tuple.getLong("myInt") == 7);
assert(tuple.get("myString").equals("a"));
+ // SOLR-8845 - Test to make sure that 1 = 0 works for things like Spark SQL
+ sParams = mapParams(CommonParams.QT, "/sql",
+ "stmt", "select id, field_i, str_s from collection1 where 1 = 0");
+
+ solrStream = new SolrStream(jetty.url, sParams);
+ tuples = getTuples(solrStream);
+
+ assertEquals(0, tuples.size());
} finally {
delete();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/686fbd3b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
index 927856a..ce14907 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
@@ -450,6 +450,25 @@ public class JdbcTest extends SolrCloudTestCase {
}
@Test
+ public void testOneEqualZeroMetadata() throws Exception {
+ // SOLR-8845 - Make sure that "1 = 0" (literal-to-literal comparison) works
+ try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost +
+ "?collection=" + COLLECTIONORALIAS)) {
+
+ try (Statement stmt = con.createStatement()) {
+ try (ResultSet rs = stmt.executeQuery("select a_s from " + COLLECTIONORALIAS + " where 1 = 0")) {
+ assertFalse(rs.next());
+
+ ResultSetMetaData resultSetMetaData = rs.getMetaData();
+ assertNotNull(resultSetMetaData);
+ assertEquals(1, resultSetMetaData.getColumnCount());
+ assertEquals("a_s", resultSetMetaData.getColumnName(1));
+ }
+ }
+ }
+ }
+
+ @Test
public void testDriverMetadata() throws Exception {
String collection = COLLECTIONORALIAS;
[34/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10235: fix precommit
Posted by is...@apache.org.
SOLR-10235: fix precommit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d8442070
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d8442070
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d8442070
Branch: refs/heads/jira/solr-6736
Commit: d8442070cfbb0cec815a465a6c143e2b31870e34
Parents: 4d7bc94
Author: Uwe Schindler <us...@apache.org>
Authored: Tue Mar 7 22:07:13 2017 +0100
Committer: Uwe Schindler <us...@apache.org>
Committed: Tue Mar 7 22:07:13 2017 +0100
----------------------------------------------------------------------
.../org/apache/solr/handler/dataimport/TestJdbcDataSource.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8442070/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
index dcb4dbc..e9908f9 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
@@ -35,6 +35,7 @@ import java.util.Properties;
import javax.sql.DataSource;
+import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.handler.dataimport.JdbcDataSource.ResultSetIterator;
import static org.mockito.Mockito.*;
import org.junit.After;
@@ -635,6 +636,7 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
return 0;
}
+ @SuppressForbidden(reason="Required by JDBC")
@Override
public java.util.logging.Logger getParentLogger() throws java.sql.SQLFeatureNotSupportedException {
throw new java.sql.SQLFeatureNotSupportedException();
[18/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10146: fixing tabs
Posted by is...@apache.org.
SOLR-10146: fixing tabs
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f49bd79e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f49bd79e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f49bd79e
Branch: refs/heads/jira/solr-6736
Commit: f49bd79ec14f58361320a0931a936840cb15df86
Parents: 686fbd3
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Sun Mar 5 12:30:36 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Sun Mar 5 12:30:36 2017 +0300
----------------------------------------------------------------------
solr/webapp/web/partials/collections.html | 28 +++++++++++++-------------
1 file changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f49bd79e/solr/webapp/web/partials/collections.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/collections.html b/solr/webapp/web/partials/collections.html
index 2ac12de..f958e14 100644
--- a/solr/webapp/web/partials/collections.html
+++ b/solr/webapp/web/partials/collections.html
@@ -216,22 +216,22 @@ limitations under the License.
<div class="block" id="shard-data">
<div class="content shard" ng-repeat="shard in collection.shards">
<a ng-click="toggleShard(shard)">
- <h2>
- <span ng-class="{open:shard.show}"><b>Shard: {{shard.name}}</b></span>
- <div style="float:right" ng-show="!shard.range || shard.state == 'inactive'"><a ng-click="toggleRemoveShard(shard)"><span class="rem"></span></a></div>
- </h2>
+ <h2>
+ <span ng-class="{open:shard.show}"><b>Shard: {{shard.name}}</b></span>
+ <div style="float:right" ng-show="!shard.range || shard.state == 'inactive'"><a ng-click="toggleRemoveShard(shard)"><span class="rem"></span></a></div>
+ </h2>
</a>
<ul ng-show="shard.showRemove">
- <li>
- <form class="delete-shard">
- <p class="clearfix"><em>Are you sure you want to delete this shard?</em></p>
- <p class="clearfix buttons">
- <button class="submit" ng-class="{success: shard.deleted}" ng-click="deleteShard(shard)"><span>Delete Shard</span></button>
- <button type="reset" class="reset" ng-click="toggleRemoveShard(shard)"><span>Cancel</span></button>
- </p>
- </form>
- </li>
- </ul>
+ <li>
+ <form class="delete-shard">
+ <p class="clearfix"><em>Are you sure you want to delete this shard?</em></p>
+ <p class="clearfix buttons">
+ <button class="submit" ng-class="{success: shard.deleted}" ng-click="deleteShard(shard)"><span>Delete Shard</span></button>
+ <button type="reset" class="reset" ng-click="toggleRemoveShard(shard)"><span>Cancel</span></button>
+ </p>
+ </form>
+ </li>
+ </ul>
<ul ng-show="shard.show">
<li>
<ul>
[16/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10146: Added button
to the Admin UI 'Collection' tab for deleting an inactive shard
Posted by is...@apache.org.
SOLR-10146: Added button to the Admin UI 'Collection' tab for deleting an inactive shard
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b2dcb47e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b2dcb47e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b2dcb47e
Branch: refs/heads/jira/solr-6736
Commit: b2dcb47ef44a814c670b5cf2a53c0d6c52810d84
Parents: 59433bb
Author: Jan Høydahl <ja...@apache.org>
Authored: Sun Mar 5 00:04:49 2017 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sun Mar 5 00:04:49 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
solr/webapp/web/css/angular/collections.css | 17 +++++++++++++++++
.../web/js/angular/controllers/collections.js | 14 ++++++++++++++
solr/webapp/web/js/angular/services.js | 1 +
solr/webapp/web/partials/collections.html | 18 +++++++++++++++++-
5 files changed, 51 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b2dcb47e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ea56cbb..bb5a784 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -149,6 +149,8 @@ New Features
* SOLR-8593: Integrate Apache Calcite into the SQLHandler (Kevin Risden, Cao Manh Dat, Joel Bernstein)
+* SOLR-10146: Added button to the Admin UI 'Collection' tab for deleting an inactive shard (Amrit Sarkar, janhoy)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b2dcb47e/solr/webapp/web/css/angular/collections.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/collections.css b/solr/webapp/web/css/angular/collections.css
index 7c2e0a6..e4b61bc 100644
--- a/solr/webapp/web/css/angular/collections.css
+++ b/solr/webapp/web/css/angular/collections.css
@@ -311,6 +311,14 @@ limitations under the License.
right:10px;
}
+#content #collections .shard h2 span.rem {
+ background-image: url( ../../img/ico/cross.png );
+ background-position: 100% 50%;
+ cursor: pointer;
+ padding-right: 21px;
+ right:10px;
+}
+
#content #collections .replica h2 span .open {
background-image: url( ../../img/ico/chevron-small.png );
}
@@ -337,6 +345,15 @@ limitations under the License.
background-image: url( ../../img/ico/tick.png );
}
+#content #collections .delete-shard span
+{
+ background-image: url( ../../img/ico/cross.png );
+}
+#content #collections .delete-shard button.submit span
+{
+ background-image: url( ../../img/ico/tick.png );
+}
+
#content #collections #node-name .chosen-container
{
width: 100% !important;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b2dcb47e/solr/webapp/web/js/angular/controllers/collections.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/collections.js b/solr/webapp/web/js/angular/controllers/collections.js
index 2bd6ab6..22c5e7f 100644
--- a/solr/webapp/web/js/angular/controllers/collections.js
+++ b/solr/webapp/web/js/angular/controllers/collections.js
@@ -210,6 +210,20 @@ solrAdminApp.controller('CollectionsController',
$scope.hideAll();
replica.showRemove = !replica.showRemove;
};
+
+ $scope.toggleRemoveShard = function(shard) {
+ $scope.hideAll();
+ shard.showRemove = !shard.showRemove;
+ };
+
+ $scope.deleteShard = function(shard) {
+ Collections.deleteShard({collection: shard.collection, shard:shard.name}, function(data) {
+ shard.deleted = true;
+ $timeout(function() {
+ $scope.refresh();
+ }, 2000);
+ });
+ }
$scope.deleteReplica = function(replica) {
Collections.deleteReplica({collection: replica.collection, shard:replica.shard, replica:replica.name}, function(data) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b2dcb47e/solr/webapp/web/js/angular/services.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/services.js b/solr/webapp/web/js/angular/services.js
index 07043c3..a1df37e 100644
--- a/solr/webapp/web/js/angular/services.js
+++ b/solr/webapp/web/js/angular/services.js
@@ -34,6 +34,7 @@ solrAdminServices.factory('System',
"deleteAlias": {params:{action: "DELETEALIAS"}},
"deleteReplica": {params:{action: "DELETEREPLICA"}},
"addReplica": {params:{action: "ADDREPLICA"}},
+ "deleteShard": {params:{action: "DELETESHARD"}},
"reload": {method: "GET", params:{action:"RELOAD", core: "@core"}},
"optimize": {params:{}}
});
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b2dcb47e/solr/webapp/web/partials/collections.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/collections.html b/solr/webapp/web/partials/collections.html
index 9797b90..2ac12de 100644
--- a/solr/webapp/web/partials/collections.html
+++ b/solr/webapp/web/partials/collections.html
@@ -215,7 +215,23 @@ limitations under the License.
<div class="block" id="shard-data">
<div class="content shard" ng-repeat="shard in collection.shards">
- <a ng-click="toggleShard(shard)"><h2><span ng-class="{open:shard.show}">Shard: {{shard.name}}</span></h2></a>
+ <a ng-click="toggleShard(shard)">
+ <h2>
+ <span ng-class="{open:shard.show}"><b>Shard: {{shard.name}}</b></span>
+ <div style="float:right" ng-show="!shard.range || shard.state == 'inactive'"><a ng-click="toggleRemoveShard(shard)"><span class="rem"></span></a></div>
+ </h2>
+ </a>
+ <ul ng-show="shard.showRemove">
+ <li>
+ <form class="delete-shard">
+ <p class="clearfix"><em>Are you sure you want to delete this shard?</em></p>
+ <p class="clearfix buttons">
+ <button class="submit" ng-class="{success: shard.deleted}" ng-click="deleteShard(shard)"><span>Delete Shard</span></button>
+ <button type="reset" class="reset" ng-click="toggleRemoveShard(shard)"><span>Cancel</span></button>
+ </p>
+ </form>
+ </li>
+ </ul>
<ul ng-show="shard.show">
<li>
<ul>
[07/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7731: fix typo in
search/package-summary.html#scoring javadocs. (chillon.m via Christine
Poerschke)
Posted by is...@apache.org.
LUCENE-7731: fix typo in search/package-summary.html#scoring javadocs. (chillon.m via Christine Poerschke)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d947f53e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d947f53e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d947f53e
Branch: refs/heads/jira/solr-6736
Commit: d947f53ef0de8c0b5fef2757f1f6c8d0874df8a4
Parents: 4347431
Author: Christine Poerschke <cp...@apache.org>
Authored: Fri Mar 3 13:13:26 2017 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Fri Mar 3 13:13:26 2017 +0000
----------------------------------------------------------------------
lucene/core/src/java/org/apache/lucene/search/package-info.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d947f53e/lucene/core/src/java/org/apache/lucene/search/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/package-info.java b/lucene/core/src/java/org/apache/lucene/search/package-info.java
index c9d8e26..2201be0 100644
--- a/lucene/core/src/java/org/apache/lucene/search/package-info.java
+++ b/lucene/core/src/java/org/apache/lucene/search/package-info.java
@@ -226,7 +226,7 @@
* <a href="http://en.wikipedia.org/wiki/Information_retrieval#Model_types">models</a>, including:
* <ul>
* <li><a href="http://en.wikipedia.org/wiki/Vector_Space_Model">Vector Space Model (VSM)</a></li>
- * <li><a href="http://en.wikipedia.org/wiki/Probabilistic_relevance_model">Probablistic Models</a> such as
+ * <li><a href="http://en.wikipedia.org/wiki/Probabilistic_relevance_model">Probabilistic Models</a> such as
* <a href="http://en.wikipedia.org/wiki/Probabilistic_relevance_model_(BM25)">Okapi BM25</a> and
* <a href="http://en.wikipedia.org/wiki/Divergence-from-randomness_model">DFR</a></li>
* <li><a href="http://en.wikipedia.org/wiki/Language_model">Language models</a></li>
[03/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-6819: Remove
index-time boosts.
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
index ce81ae1..a8d6523 100644
--- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
+++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
@@ -252,7 +252,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
if (mappedOutputField != null) {
log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang);
SolrInputField inField = doc.getField(fieldName);
- doc.setField(mappedOutputField, inField.getValue(), inField.getBoost());
+ doc.setField(mappedOutputField, inField.getValue());
if(!mapKeepOrig) {
log.debug("Removing old field {}", fieldName);
doc.removeField(fieldName);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
index 2c2660b..a3af6e1 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
@@ -17,10 +17,12 @@
package org.apache.solr.morphlines.solr;
import java.io.IOException;
+import java.lang.invoke.MethodHandles;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
@@ -34,6 +36,9 @@ import org.kitesdk.morphline.base.AbstractCommand;
import org.kitesdk.morphline.base.Configs;
import org.kitesdk.morphline.base.Metrics;
import org.kitesdk.morphline.base.Notifications;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.codahale.metrics.Timer;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
@@ -43,6 +48,9 @@ import com.typesafe.config.ConfigFactory;
*/
public final class LoadSolrBuilder implements CommandBuilder {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
+
@Override
public Collection<String> getNames() {
return Collections.singletonList("loadSolr");
@@ -60,7 +68,6 @@ public final class LoadSolrBuilder implements CommandBuilder {
private static final class LoadSolr extends AbstractCommand {
private final DocumentLoader loader;
- private final Map<String, Float> boosts = new HashMap();
private final Timer elapsedTime;
public LoadSolr(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
@@ -70,10 +77,13 @@ public final class LoadSolrBuilder implements CommandBuilder {
LOG.debug("solrLocator: {}", locator);
this.loader = locator.getLoader();
Config boostsConfig = getConfigs().getConfig(config, "boosts", ConfigFactory.empty());
- for (Map.Entry<String, Object> entry : new Configs().getEntrySet(boostsConfig)) {
- String fieldName = entry.getKey();
- float boost = Float.parseFloat(entry.getValue().toString().trim());
- boosts.put(fieldName, boost);
+ if (new Configs().getEntrySet(boostsConfig).isEmpty() == false) {
+ String message = "Ignoring field boosts: as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
}
validateArguments();
this.elapsedTime = getTimer(Metrics.ELAPSED_TIME);
@@ -134,20 +144,10 @@ public final class LoadSolrBuilder implements CommandBuilder {
SolrInputDocument doc = new SolrInputDocument(new HashMap(2 * map.size()));
for (Map.Entry<String, Collection<Object>> entry : map.entrySet()) {
String key = entry.getKey();
- doc.setField(key, entry.getValue(), getBoost(key));
+ doc.setField(key, entry.getValue());
}
return doc;
}
-
- private float getBoost(String key) {
- if (boosts.size() > 0) {
- Float boost = boosts.get(key);
- if (boost != null) {
- return boost.floatValue();
- }
- }
- return 1.0f;
- }
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
----------------------------------------------------------------------
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
index 024dc4e..ec08ee8 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
@@ -76,7 +76,7 @@ public class UIMAToSolrMapper {
if (log.isDebugEnabled()) {
log.debug("writing {} in {}", new Object[]{featureValue, fieldName});
}
- document.addField(fieldName, featureValue, 1.0f);
+ document.addField(fieldName, featureValue);
}
}
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
index 4125aa9..fd568df 100644
--- a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
@@ -84,8 +84,6 @@ public class DocumentAnalysisRequestHandler extends AnalysisRequestHandlerBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
- private static final float DEFAULT_BOOST = 1.0f;
-
private XMLInputFactory inputFactory;
@Override
@@ -298,7 +296,7 @@ public class DocumentAnalysisRequestHandler extends AnalysisRequestHandlerBase {
}
return doc;
} else if ("field".equals(reader.getLocalName())) {
- doc.addField(fieldName, text.toString(), DEFAULT_BOOST);
+ doc.addField(fieldName, text.toString());
if (uniqueKeyField.equals(fieldName)) {
hasId = true;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
index 8bdd959..7602d9e 100644
--- a/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/ExportWriter.java
@@ -1377,7 +1377,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
while((o = vals.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
BytesRef ref = vals.lookupOrd(o);
fieldType.indexedToReadable(ref, cref);
- IndexableField f = fieldType.createField(schemaField, cref.toString(), 1.0f);
+ IndexableField f = fieldType.createField(schemaField, cref.toString());
if (f == null) w.add(cref.toString());
else w.add(fieldType.toObject(f));
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
index 7f08684..62a50e6 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
@@ -298,7 +298,6 @@ public class LukeRequestHandler extends RequestHandlerBase
if (bytes != null) {
f.add( "binary", Base64.byteArrayToBase64(bytes.bytes, bytes.offset, bytes.length));
}
- f.add( "boost", field.boost() );
if (!ftype.isPointField()) {
Term t = new Term(field.name(), ftype!=null ? ftype.storedToIndexed(field) : field.stringValue());
f.add( "docFreq", t.text()==null ? 0 : reader.docFreq( t ) ); // this can be 0 for non-indexed fields
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java b/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
index 84c82d7..b503fa3 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
@@ -83,7 +83,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
private class FieldAdder {
void add(SolrInputDocument doc, int line, int column, String val) {
if (val.length() > 0) {
- doc.addField(fieldnames[column],val,1.0f);
+ doc.addField(fieldnames[column],val);
}
}
}
@@ -92,7 +92,7 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
private class FieldAdderEmpty extends CSVLoaderBase.FieldAdder {
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
- doc.addField(fieldnames[column],val,1.0f);
+ doc.addField(fieldnames[column],val);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
index 2041d2f..8650ab8 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
@@ -28,6 +28,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.io.IOUtils;
import org.apache.solr.common.SolrException;
@@ -65,6 +66,7 @@ import static org.apache.solr.common.params.CommonParams.PATH;
*/
public class JsonLoader extends ContentStreamLoader {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
public static final String CHILD_DOC_KEY = "_childDocuments_";
@Override
@@ -435,8 +437,6 @@ public class JsonLoader extends ContentStreamLoader {
cmd.commitWithin = commitWithin;
cmd.overwrite = overwrite;
- float boost = 1.0f;
-
while (true) {
int ev = parser.nextEvent();
if (ev == JSONParser.STRING) {
@@ -454,7 +454,13 @@ public class JsonLoader extends ContentStreamLoader {
} else if (UpdateRequestHandler.COMMIT_WITHIN.equals(key)) {
cmd.commitWithin = (int) parser.getLong();
} else if ("boost".equals(key)) {
- boost = Float.parseFloat(parser.getNumberChars().toString());
+ String boost = parser.getNumberChars().toString();
+ String message = "Ignoring document boost: " + boost + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key '" + key + "' at [" + parser.getPosition() + "]");
}
@@ -467,7 +473,6 @@ public class JsonLoader extends ContentStreamLoader {
if (cmd.solrDoc == null) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing solr document at [" + parser.getPosition() + "]");
}
- cmd.solrDoc.setDocumentBoost(boost);
return cmd;
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
@@ -536,7 +541,7 @@ public class JsonLoader extends ContentStreamLoader {
// SolrInputDocument.addField will do the right thing
// if the doc already has another value for this field
// (ie: repeating fieldname keys)
- sdoc.addField(sif.getName(), sif.getValue(), sif.getBoost());
+ sdoc.addField(sif.getName(), sif.getValue());
}
}
@@ -548,14 +553,13 @@ public class JsonLoader extends ContentStreamLoader {
parseExtendedFieldValue(sif, ev);
} else {
Object val = parseNormalFieldValue(ev, sif.getName());
- sif.setValue(val, 1.0f);
+ sif.setValue(val);
}
}
private void parseExtendedFieldValue(SolrInputField sif, int ev) throws IOException {
assert ev == JSONParser.OBJECT_START;
- float boost = 1.0f;
Object normalFieldValue = null;
Map<String, Object> extendedInfo = null;
@@ -573,7 +577,12 @@ public class JsonLoader extends ContentStreamLoader {
+ "Unexpected " + JSONParser.getEventString(ev) + " at [" + parser.getPosition() + "], field=" + sif.getName());
}
- boost = (float) parser.getDouble();
+ String message = "Ignoring field boost: " + parser.getDouble() + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
} else if ("value".equals(label)) {
normalFieldValue = parseNormalFieldValue(parser.nextEvent(), sif.getName());
} else {
@@ -593,9 +602,9 @@ public class JsonLoader extends ContentStreamLoader {
if (normalFieldValue != null) {
extendedInfo.put("value", normalFieldValue);
}
- sif.setValue(extendedInfo, boost);
+ sif.setValue(extendedInfo);
} else {
- sif.setValue(normalFieldValue, boost);
+ sif.setValue(normalFieldValue);
}
return;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
index d6a5170..038ed9f 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
@@ -35,6 +35,7 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
@@ -71,6 +72,7 @@ import static org.apache.solr.common.params.CommonParams.NAME;
public class XMLLoader extends ContentStreamLoader {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
public static final String CONTEXT_TRANSFORMER_KEY = "xsltupdater.transformer";
@@ -379,7 +381,12 @@ public class XMLLoader extends ContentStreamLoader {
for (int i = 0; i < parser.getAttributeCount(); i++) {
attrName = parser.getAttributeLocalName(i);
if ("boost".equals(attrName)) {
- doc.setDocumentBoost(Float.parseFloat(parser.getAttributeValue(i)));
+ String message = "Ignoring document boost: " + parser.getAttributeValue(i) + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
} else {
log.warn("XML element <doc> has invalid XML attr:" + attrName);
}
@@ -387,7 +394,6 @@ public class XMLLoader extends ContentStreamLoader {
StringBuilder text = new StringBuilder();
String name = null;
- float boost = 1.0f;
boolean isNull = false;
String update = null;
Collection<SolrInputDocument> subDocs = null;
@@ -438,8 +444,7 @@ public class XMLLoader extends ContentStreamLoader {
}
break;
}
- doc.addField(name, v, boost);
- boost = 1.0f;
+ doc.addField(name, v);
// field is over
name = null;
}
@@ -460,7 +465,6 @@ public class XMLLoader extends ContentStreamLoader {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
msg);
}
- boost = 1.0f;
update = null;
isNull = false;
String attrVal = "";
@@ -470,7 +474,12 @@ public class XMLLoader extends ContentStreamLoader {
if (NAME.equals(attrName)) {
name = attrVal;
} else if ("boost".equals(attrName)) {
- boost = Float.parseFloat(attrVal);
+ String message = "Ignoring field boost: " + attrVal + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
} else if ("null".equals(attrName)) {
isNull = StrUtils.parseBoolean(attrVal);
} else if ("update".equals(attrName)) {
@@ -488,7 +497,7 @@ public class XMLLoader extends ContentStreamLoader {
for (Map.Entry<String, Map<String, Object>> entry : updateMap.entrySet()) {
name = entry.getKey();
Map<String, Object> value = entry.getValue();
- doc.addField(name, value, 1.0f);
+ doc.addField(name, value);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
index 10756da..4e2829e 100644
--- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
@@ -200,7 +200,7 @@ public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extend
//--------------------------------------------------------------
@Override
- public final Field createField(SchemaField field, Object val, float boost) {
+ public final Field createField(SchemaField field, Object val) {
throw new IllegalStateException("instead call createFields() because isPolyField() is true");
}
@@ -210,7 +210,7 @@ public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extend
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object val, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object val) {
String shapeStr = null;
Shape shape;
if (val instanceof Shape) {
@@ -237,7 +237,7 @@ public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extend
return result;
}
- /** Called by {@link #createFields(SchemaField, Object, float)} to get the stored value. */
+ /** Called by {@link #createFields(SchemaField, Object)} to get the stored value. */
protected String getStoredValue(Shape shape, String shapeStr) {
return (shapeStr == null) ? shapeToString(shape) : shapeStr;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/BBoxField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/BBoxField.java b/solr/core/src/java/org/apache/solr/schema/BBoxField.java
index d7fda7c..d69255b 100644
--- a/solr/core/src/java/org/apache/solr/schema/BBoxField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BBoxField.java
@@ -137,7 +137,7 @@ public class BBoxField extends AbstractSpatialFieldType<BBoxStrategy> implements
//Solr's FieldType ought to expose Lucene FieldType. Instead as a hack we create a Field with a dummy value.
final SchemaField solrNumField = new SchemaField("_", numberType);//dummy temp
org.apache.lucene.document.FieldType luceneType =
- (org.apache.lucene.document.FieldType) solrNumField.createField(0.0, 1.0f).fieldType();
+ (org.apache.lucene.document.FieldType) solrNumField.createField(0.0).fieldType();
luceneType.setStored(storeSubFields);
//and annoyingly this Field isn't going to have a docValues format because Solr uses a separate Field for that
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/BinaryField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/BinaryField.java b/solr/core/src/java/org/apache/solr/schema/BinaryField.java
index 889bd3c..d1882b1 100644
--- a/solr/core/src/java/org/apache/solr/schema/BinaryField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BinaryField.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
-import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
@@ -71,7 +70,7 @@ public class BinaryField extends FieldType {
}
@Override
- public IndexableField createField(SchemaField field, Object val, float boost) {
+ public IndexableField createField(SchemaField field, Object val) {
if (val == null) return null;
if (!field.stored()) {
log.trace("Ignoring unstored binary field: " + field);
@@ -95,8 +94,6 @@ public class BinaryField extends FieldType {
len = buf.length;
}
- Field f = new org.apache.lucene.document.StoredField(field.getName(), buf, offset, len);
- f.setBoost(boost);
- return f;
+ return new org.apache.lucene.document.StoredField(field.getName(), buf, offset, len);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/BoolField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/BoolField.java b/solr/core/src/java/org/apache/solr/schema/BoolField.java
index 1645ee6..7d5c0d7 100644
--- a/solr/core/src/java/org/apache/solr/schema/BoolField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BoolField.java
@@ -182,8 +182,8 @@ public class BoolField extends PrimitiveFieldType {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
- IndexableField fval = createField(field, value, boost);
+ public List<IndexableField> createFields(SchemaField field, Object value) {
+ IndexableField fval = createField(field, value);
if (field.hasDocValues()) {
IndexableField docval;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/CollationField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/CollationField.java b/solr/core/src/java/org/apache/solr/schema/CollationField.java
index 805e204..a498c01 100644
--- a/solr/core/src/java/org/apache/solr/schema/CollationField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CollationField.java
@@ -254,10 +254,10 @@ public class CollationField extends FieldType {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object value) {
if (field.hasDocValues()) {
List<IndexableField> fields = new ArrayList<>();
- fields.add(createField(field, value, boost));
+ fields.add(createField(field, value));
final BytesRef bytes = getCollationKey(field.getName(), value.toString());
if (field.multiValued()) {
fields.add(new SortedSetDocValuesField(field.getName(), bytes));
@@ -266,7 +266,7 @@ public class CollationField extends FieldType {
}
return fields;
} else {
- return Collections.singletonList(createField(field, value, boost));
+ return Collections.singletonList(createField(field, value));
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
index 7ad285b..9e994cf 100644
--- a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
@@ -171,21 +171,21 @@ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoa
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object externalVal, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object externalVal) {
CurrencyValue value = CurrencyValue.parse(externalVal.toString(), defaultCurrency);
List<IndexableField> f = new ArrayList<>();
SchemaField amountField = getAmountField(field);
- f.add(amountField.createField(String.valueOf(value.getAmount()), amountField.indexed() && !amountField.omitNorms() ? boost : 1F));
+ f.add(amountField.createField(String.valueOf(value.getAmount())));
SchemaField currencyField = getCurrencyField(field);
- f.add(currencyField.createField(value.getCurrencyCode(), currencyField.indexed() && !currencyField.omitNorms() ? boost : 1F));
+ f.add(currencyField.createField(value.getCurrencyCode()));
if (field.stored()) {
String storedValue = externalVal.toString().trim();
if (storedValue.indexOf(",") < 0) {
storedValue += "," + defaultCurrency;
}
- f.add(createField(field.getName(), storedValue, StoredField.TYPE, 1F));
+ f.add(createField(field.getName(), storedValue, StoredField.TYPE));
}
return f;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
index d51c1f1..8dde953 100644
--- a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
@@ -63,10 +63,10 @@ public class DateRangeField extends AbstractSpatialPrefixTreeFieldType<NumberRan
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object val, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object val) {
if (val instanceof Date || val instanceof Calendar)//From URP?
val = tree.toUnitShape(val);
- return super.createFields(field, val, boost);
+ return super.createFields(field, val);
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DoublePointField.java b/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
index 3a90eec..e34ebec 100644
--- a/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
@@ -178,12 +178,9 @@ public class DoublePointField extends PointField implements DoubleValueFieldType
}
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
if (!isFieldUsed(field)) return null;
- if (boost != 1.0 && log.isTraceEnabled()) {
- log.trace("Can't use document/field boost for PointField. Field: " + field.getName() + ", boost: " + boost);
- }
double doubleValue = (value instanceof Number) ? ((Number) value).doubleValue() : Double.parseDouble(value.toString());
return new DoublePoint(field.getName(), doubleValue);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/EnumField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index 37cd24b..3e83db4 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -391,7 +391,7 @@ public class EnumField extends PrimitiveFieldType {
* {@inheritDoc}
*/
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
final boolean indexed = field.indexed();
final boolean stored = field.stored();
final boolean docValues = field.hasDocValues();
@@ -418,21 +418,17 @@ public class EnumField extends PrimitiveFieldType {
newType.setNumericType(LegacyNumericType.INT);
newType.setNumericPrecisionStep(DEFAULT_PRECISION_STEP);
- final org.apache.lucene.document.Field f;
- f = new LegacyIntField(field.getName(), intValue.intValue(), newType);
-
- f.setBoost(boost);
- return f;
+ return new LegacyIntField(field.getName(), intValue.intValue(), newType);
}
/**
* {@inheritDoc}
*/
@Override
- public List<IndexableField> createFields(SchemaField sf, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField sf, Object value) {
if (sf.hasDocValues()) {
List<IndexableField> fields = new ArrayList<>();
- final IndexableField field = createField(sf, value, boost);
+ final IndexableField field = createField(sf, value);
fields.add(field);
if (sf.multiValued()) {
@@ -445,7 +441,7 @@ public class EnumField extends PrimitiveFieldType {
}
return fields;
} else {
- return Collections.singletonList(createField(sf, value, boost));
+ return Collections.singletonList(createField(sf, value));
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java
index c21b23f..c542a95 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java
@@ -86,7 +86,7 @@ public abstract class FieldType extends FieldProperties {
/**
* The default poly field separator.
*
- * @see #createFields(SchemaField, Object, float)
+ * @see #createFields(SchemaField, Object)
* @see #isPolyField()
*/
public static final String POLY_FIELD_SEPARATOR = "___";
@@ -119,9 +119,9 @@ public abstract class FieldType extends FieldProperties {
}
/**
- * A "polyField" is a FieldType that can produce more than one IndexableField instance for a single value, via the {@link #createFields(org.apache.solr.schema.SchemaField, Object, float)} method. This is useful
+ * A "polyField" is a FieldType that can produce more than one IndexableField instance for a single value, via the {@link #createFields(org.apache.solr.schema.SchemaField, Object)} method. This is useful
* when hiding the implementation details of a field from the Solr end user. For instance, a spatial point may be represented by multiple different fields.
- * @return true if the {@link #createFields(org.apache.solr.schema.SchemaField, Object, float)} method may return more than one field
+ * @return true if the {@link #createFields(org.apache.solr.schema.SchemaField, Object)} method may return more than one field
*/
public boolean isPolyField(){
return false;
@@ -263,7 +263,7 @@ public abstract class FieldType extends FieldProperties {
*
*
*/
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
if (!field.indexed() && !field.stored()) {
if (log.isTraceEnabled())
log.trace("Ignoring unindexed/unstored field: " + field);
@@ -287,7 +287,7 @@ public abstract class FieldType extends FieldProperties {
newType.setStoreTermVectorOffsets(field.storeTermOffsets());
newType.setStoreTermVectorPositions(field.storeTermPositions());
newType.setStoreTermVectorPayloads(field.storeTermPayloads());*/
- return createField(field.getName(), val, field, boost);
+ return createField(field.getName(), val, field);
}
/**
@@ -296,27 +296,23 @@ public abstract class FieldType extends FieldProperties {
* @param name The name of the field
* @param val The _internal_ value to index
* @param type {@link org.apache.lucene.document.FieldType}
- * @param boost The boost value
* @return the {@link org.apache.lucene.index.IndexableField}.
*/
- protected IndexableField createField(String name, String val, org.apache.lucene.index.IndexableFieldType type, float boost){
- Field f = new Field(name, val, type);
- f.setBoost(boost);
- return f;
+ protected IndexableField createField(String name, String val, org.apache.lucene.index.IndexableFieldType type){
+ return new Field(name, val, type);
}
/**
* Given a {@link org.apache.solr.schema.SchemaField}, create one or more {@link org.apache.lucene.index.IndexableField} instances
* @param field the {@link org.apache.solr.schema.SchemaField}
* @param value The value to add to the field
- * @param boost The boost to apply
* @return An array of {@link org.apache.lucene.index.IndexableField}
*
- * @see #createField(SchemaField, Object, float)
+ * @see #createField(SchemaField, Object)
* @see #isPolyField()
*/
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
- IndexableField f = createField( field, value, boost);
+ public List<IndexableField> createFields(SchemaField field, Object value) {
+ IndexableField f = createField( field, value);
if (field.hasDocValues() && f.fieldType().docValuesType() == null) {
// field types that support doc values should either override createField
// to return a field with doc values or extend createFields if this can't
@@ -366,7 +362,7 @@ public abstract class FieldType extends FieldProperties {
public Object toObject(SchemaField sf, BytesRef term) {
final CharsRefBuilder ref = new CharsRefBuilder();
indexedToReadable(term, ref);
- final IndexableField f = createField(sf, ref.toString(), 1.0f);
+ final IndexableField f = createField(sf, ref.toString());
return toObject(f);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FloatPointField.java b/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
index 6647286..39453e7 100644
--- a/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
@@ -179,12 +179,9 @@ public class FloatPointField extends PointField implements FloatValueFieldType {
}
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
if (!isFieldUsed(field)) return null;
- if (boost != 1.0 && log.isTraceEnabled()) {
- log.trace("Can't use document/field boost for PointField. Field: " + field.getName() + ", boost: " + boost);
- }
float floatValue = (value instanceof Number) ? ((Number) value).floatValue() : Float.parseFloat(value.toString());
return new FloatPoint(field.getName(), floatValue);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/IntPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/IntPointField.java b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
index b25bc9f..db26988 100644
--- a/solr/core/src/java/org/apache/solr/schema/IntPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
@@ -169,12 +169,9 @@ public class IntPointField extends PointField implements IntValueFieldType {
}
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
if (!isFieldUsed(field)) return null;
- if (boost != 1.0 && log.isTraceEnabled()) {
- log.trace("Can't use document/field boost for PointField. Field: " + field.getName() + ", boost: " + boost);
- }
int intValue = (value instanceof Number) ? ((Number) value).intValue() : Integer.parseInt(value.toString());
return new IntPoint(field.getName(), intValue);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/LatLonType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
index 856c8bb..c484f3a 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
@@ -67,7 +67,7 @@ public class LatLonType extends AbstractSubTypeFieldType implements SpatialQuery
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object value) {
String externalVal = value.toString();
//we could have 3 fields (two for the lat & lon, one for storage)
List<IndexableField> f = new ArrayList<>(3);
@@ -75,14 +75,14 @@ public class LatLonType extends AbstractSubTypeFieldType implements SpatialQuery
Point point = SpatialUtils.parsePointSolrException(externalVal, SpatialContext.GEO);
//latitude
SchemaField subLatSF = subField(field, LAT, schema);
- f.add(subLatSF.createField(String.valueOf(point.getY()), subLatSF.indexed() && !subLatSF.omitNorms() ? boost : 1f));
+ f.add(subLatSF.createField(String.valueOf(point.getY())));
//longitude
SchemaField subLonSF = subField(field, LON, schema);
- f.add(subLonSF.createField(String.valueOf(point.getX()), subLonSF.indexed() && !subLonSF.omitNorms() ? boost : 1f));
+ f.add(subLonSF.createField(String.valueOf(point.getX())));
}
if (field.stored()) {
- f.add(createField(field.getName(), externalVal, StoredField.TYPE, 1f));
+ f.add(createField(field.getName(), externalVal, StoredField.TYPE));
}
return f;
}
@@ -245,7 +245,7 @@ public class LatLonType extends AbstractSubTypeFieldType implements SpatialQuery
//It never makes sense to create a single field, so make it impossible to happen
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
throw new UnsupportedOperationException("LatLonType uses multiple fields. field=" + field.getName());
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/LongPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LongPointField.java b/solr/core/src/java/org/apache/solr/schema/LongPointField.java
index 5631938..f5d0948 100644
--- a/solr/core/src/java/org/apache/solr/schema/LongPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/LongPointField.java
@@ -174,12 +174,9 @@ public class LongPointField extends PointField implements LongValueFieldType {
}
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
if (!isFieldUsed(field)) return null;
- if (boost != 1.0 && log.isTraceEnabled()) {
- log.trace("Can't use document/field boost for PointField. Field: " + field.getName() + ", boost: " + boost);
- }
long longValue = (value instanceof Number) ? ((Number) value).longValue() : Long.parseLong(value.toString());
return new LongPoint(field.getName(), longValue);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/PointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PointField.java b/solr/core/src/java/org/apache/solr/schema/PointField.java
index 8746dac..6c75105 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointField.java
@@ -203,12 +203,12 @@ public abstract class PointField extends NumericFieldType {
}
@Override
- public List<IndexableField> createFields(SchemaField sf, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField sf, Object value) {
if (!(sf.hasDocValues() || sf.stored())) {
- return Collections.singletonList(createField(sf, value, boost));
+ return Collections.singletonList(createField(sf, value));
}
List<IndexableField> fields = new ArrayList<>();
- final IndexableField field = createField(sf, value, boost);
+ final IndexableField field = createField(sf, value);
fields.add(field);
if (sf.hasDocValues()) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/PointType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PointType.java b/solr/core/src/java/org/apache/solr/schema/PointType.java
index 1c13097..4c022b8 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointType.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointType.java
@@ -66,7 +66,7 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object value) {
String externalVal = value.toString();
String[] point = parseCommaSeparatedList(externalVal, dimension);
@@ -76,13 +76,13 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
if (field.indexed()) {
for (int i=0; i<dimension; i++) {
SchemaField sf = subField(field, i, schema);
- f.add(sf.createField(point[i], sf.indexed() && !sf.omitNorms() ? boost : 1f));
+ f.add(sf.createField(point[i]));
}
}
if (field.stored()) {
String storedVal = externalVal; // normalize or not?
- f.add(createField(field.getName(), storedVal, StoredField.TYPE, 1f));
+ f.add(createField(field.getName(), storedVal, StoredField.TYPE));
}
return f;
@@ -105,7 +105,7 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
*
*/
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
throw new UnsupportedOperationException("PointType uses multiple fields. field=" + field.getName());
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
index 5f125d9..d2dc811 100644
--- a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
+++ b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
@@ -119,11 +119,10 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
}
@Override
- public IndexableField createField(SchemaField field, Object value,
- float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
IndexableField f = null;
try {
- f = fromString(field, String.valueOf(value), boost);
+ f = fromString(field, String.valueOf(value));
} catch (Exception e) {
LOG.warn("Error parsing pre-analyzed field '" + field.getName() + "'", e);
return null;
@@ -225,7 +224,7 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
}
- public IndexableField fromString(SchemaField field, String val, float boost) throws Exception {
+ public IndexableField fromString(SchemaField field, String val) throws Exception {
if (val == null || val.trim().length() == 0) {
return null;
}
@@ -269,9 +268,6 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
}
}
}
- if (f != null) {
- f.setBoost(boost);
- }
return f;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/SchemaField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaField.java b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
index 009e5fc..e690a13 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaField.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
@@ -113,16 +113,16 @@ public final class SchemaField extends FieldProperties implements IndexableField
boolean isTokenized() { return (properties & TOKENIZED)!=0; }
boolean isBinary() { return (properties & BINARY)!=0; }
- public IndexableField createField(Object val, float boost) {
- return type.createField(this,val,boost);
+ public IndexableField createField(Object val) {
+ return type.createField(this,val);
}
- public List<IndexableField> createFields(Object val, float boost) {
- return type.createFields(this,val,boost);
+ public List<IndexableField> createFields(Object val) {
+ return type.createFields(this,val);
}
/**
- * If true, then use {@link #createFields(Object, float)}, else use {@link #createField} to save an extra allocation
+ * If true, then use {@link #createFields(Object)}, else use {@link #createField} to save an extra allocation
* @return true if this field is a poly field
*/
public boolean isPolyField(){
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/StrField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/StrField.java b/solr/core/src/java/org/apache/solr/schema/StrField.java
index a00cfb7..0b1576b 100644
--- a/solr/core/src/java/org/apache/solr/schema/StrField.java
+++ b/solr/core/src/java/org/apache/solr/schema/StrField.java
@@ -40,8 +40,8 @@ public class StrField extends PrimitiveFieldType {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
- IndexableField fval = createField(field, value, boost);
+ public List<IndexableField> createFields(SchemaField field, Object value) {
+ IndexableField fval = createField(field, value);
if (field.hasDocValues()) {
IndexableField docval;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/schema/TrieField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index e470155..b70b2b0 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -572,7 +572,7 @@ public class TrieField extends NumericFieldType {
}
@Override
- public IndexableField createField(SchemaField field, Object value, float boost) {
+ public IndexableField createField(SchemaField field, Object value) {
boolean indexed = field.indexed();
boolean stored = field.stored();
boolean docValues = field.hasDocValues();
@@ -647,15 +647,14 @@ public class TrieField extends NumericFieldType {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
}
- f.setBoost(boost);
return f;
}
@Override
- public List<IndexableField> createFields(SchemaField sf, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField sf, Object value) {
if (sf.hasDocValues()) {
List<IndexableField> fields = new ArrayList<>();
- final IndexableField field = createField(sf, value, boost);
+ final IndexableField field = createField(sf, value);
fields.add(field);
if (sf.multiValued()) {
@@ -677,7 +676,7 @@ public class TrieField extends NumericFieldType {
return fields;
} else {
- return Collections.singletonList(createField(sf, value, boost));
+ return Collections.singletonList(createField(sf, value));
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/search/Grouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 75011e7..327e5bb 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -819,7 +819,7 @@ public class Grouping {
SchemaField schemaField = searcher.getSchema().getField(groupBy);
FieldType fieldType = schemaField.getType();
String readableValue = fieldType.indexedToReadable(group.groupValue.utf8ToString());
- IndexableField field = schemaField.createField(readableValue, 1.0f);
+ IndexableField field = schemaField.createField(readableValue);
nl.add("groupValue", fieldType.toObject(field));
} else {
nl.add("groupValue", null);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java
index 47b5276..de2dee4 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/endresulttransformer/GroupedEndResultTransformer.java
@@ -69,7 +69,7 @@ public class GroupedEndResultTransformer implements EndResultTransformer {
SimpleOrderedMap<Object> groupResult = new SimpleOrderedMap<>();
if (group.groupValue != null) {
groupResult.add(
- "groupValue", groupFieldType.toObject(groupField.createField(group.groupValue.utf8ToString(), 1.0f))
+ "groupValue", groupFieldType.toObject(groupField.createField(group.groupValue.utf8ToString()))
);
} else {
groupResult.add("groupValue", null);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
index eb6612e..abf4a1f 100644
--- a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
+++ b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
@@ -43,25 +43,22 @@ public class DocumentBuilder {
* @param doc Document that the field needs to be added to
* @param field The schema field object for the field
* @param val The value for the field to be added
- * @param boost Boost value for the field
* @param forInPlaceUpdate Whether the field is to be added for in-place update. If true,
* only numeric docValues based fields are added to the document. This can be true
* when constructing a Lucene document for writing an in-place update, and we don't need
* presence of non-updatable fields (non NDV) in such a document.
*/
- private static void addField(Document doc, SchemaField field, Object val, float boost,
+ private static void addField(Document doc, SchemaField field, Object val,
boolean forInPlaceUpdate) {
if (val instanceof IndexableField) {
if (forInPlaceUpdate) {
assert val instanceof NumericDocValuesField: "Expected in-place update to be done on"
+ " NDV fields only.";
}
- // set boost to the calculated compound boost
- ((Field)val).setBoost(boost);
doc.add((Field)val);
return;
}
- for (IndexableField f : field.getType().createFields(field, val, boost)) {
+ for (IndexableField f : field.getType().createFields(field, val)) {
if (f != null) { // null fields are not added
// HACK: workaround for SOLR-9809
// even though at this point in the code we know the field is single valued and DV only
@@ -126,7 +123,6 @@ public class DocumentBuilder {
final String uniqueKeyFieldName = null == uniqueKeyField ? null : uniqueKeyField.getName();
Document out = new Document();
- final float docBoost = doc.getDocumentBoost();
Set<String> usedFields = Sets.newHashSet();
// Load fields from SolrDocument to Document
@@ -141,19 +137,6 @@ public class DocumentBuilder {
"ERROR: "+getID(doc, schema)+"multiple values encountered for non multiValued field " +
sfield.getName() + ": " +field.getValue() );
}
-
- float fieldBoost = field.getBoost();
- boolean applyBoost = sfield != null && sfield.indexed() && !sfield.omitNorms();
-
- if (applyBoost == false && fieldBoost != 1.0F) {
- throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
- "ERROR: "+getID(doc, schema)+"cannot set an index-time boost, unindexed or norms are omitted for field " +
- sfield.getName() + ": " +field.getValue() );
- }
-
- // Lucene no longer has a native docBoost, so we have to multiply
- // it ourselves
- float compoundBoost = fieldBoost * docBoost;
List<CopyField> copyFields = schema.getCopyFieldsList(name);
if( copyFields.size() == 0 ) copyFields = null;
@@ -168,7 +151,7 @@ public class DocumentBuilder {
hasField = true;
if (sfield != null) {
used = true;
- addField(out, sfield, v, applyBoost ? compoundBoost : 1f,
+ addField(out, sfield, v,
name.equals(uniqueKeyFieldName) ? false : forInPlaceUpdate);
// record the field as having a value
usedFields.add(sfield.getName());
@@ -200,27 +183,13 @@ public class DocumentBuilder {
val = cf.getLimitedValue((String)val);
}
- // we can't copy any boost unless the dest field is
- // indexed & !omitNorms, but which boost we copy depends
- // on whether the dest field already contains values (we
- // don't want to apply the compounded docBoost more then once)
- final float destBoost =
- (destinationField.indexed() && !destinationField.omitNorms()) ?
- (destHasValues ? fieldBoost : compoundBoost) : 1.0F;
-
- addField(out, destinationField, val, destBoost,
+ addField(out, destinationField, val,
destinationField.getName().equals(uniqueKeyFieldName) ? false : forInPlaceUpdate);
// record the field as having a value
usedFields.add(destinationField.getName());
}
}
}
-
- // The final boost for a given field named is the product of the
- // *all* boosts on values of that field.
- // For multi-valued fields, we only want to set the boost on the
- // first field.
- fieldBoost = compoundBoost = 1.0f;
}
}
catch( SolrException ex ) {
@@ -250,7 +219,7 @@ public class DocumentBuilder {
for (SchemaField field : schema.getRequiredFields()) {
if (out.getField(field.getName() ) == null) {
if (field.getDefaultValue() != null) {
- addField(out, field, field.getDefaultValue(), 1.0f, false);
+ addField(out, field, field.getDefaultValue(), false);
}
else {
String msg = getID(doc, schema) + "missing required field: " + field.getName();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java
index 52726ea..1d56a62 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/AllValuesOrNoneFieldMutatingUpdateProcessor.java
@@ -104,10 +104,9 @@ public abstract class AllValuesOrNoneFieldMutatingUpdateProcessor extends FieldM
srcField.getName(), srcVal.getClass().getSimpleName(), srcVal,
destVal.getClass().getSimpleName(), destVal));
}
- result.addValue(destVal, 1.0F);
+ result.addValue(destVal);
}
}
- result.setBoost(srcField.getBoost());
if (null != messages && log.isDebugEnabled()) {
for (String message : messages) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
index 0c17758..093149a 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
@@ -306,18 +306,18 @@ public class AtomicUpdateDocumentMerger {
protected void doSet(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
SchemaField sf = schema.getField(sif.getName());
- toDoc.setField(sif.getName(), sf.getType().toNativeType(fieldVal), sif.getBoost());
+ toDoc.setField(sif.getName(), sf.getType().toNativeType(fieldVal));
}
protected void doAdd(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
SchemaField sf = schema.getField(sif.getName());
- toDoc.addField(sif.getName(), sf.getType().toNativeType(fieldVal), sif.getBoost());
+ toDoc.addField(sif.getName(), sf.getType().toNativeType(fieldVal));
}
protected void doInc(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
SolrInputField numericField = toDoc.get(sif.getName());
if (numericField == null) {
- toDoc.setField(sif.getName(), fieldVal, sif.getBoost());
+ toDoc.setField(sif.getName(), fieldVal);
} else {
// TODO: fieldtype needs externalToObject?
String oldValS = numericField.getFirstValue().toString();
@@ -339,7 +339,7 @@ public class AtomicUpdateDocumentMerger {
result = ((Integer) oldVal).intValue() + Integer.parseInt(fieldValS);
}
- toDoc.setField(sif.getName(), result, sif.getBoost());
+ toDoc.setField(sif.getName(), result);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
index ad5c772..2ffc5b9 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
@@ -450,8 +450,7 @@ public class CloneFieldUpdateProcessorFactory
}
for (Object val : srcFieldValues) {
- // preserve existing dest boost (multiplicitive), ignore src boost
- destField.addValue(val, 1.0f);
+ destField.addValue(val);
}
// put it in map to avoid concurrent modification...
destMap.put(resolvedDest, destField);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java
index 65ca976..ae260e9 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/ConcatFieldUpdateProcessorFactory.java
@@ -81,8 +81,7 @@ public final class ConcatFieldUpdateProcessorFactory extends FieldMutatingUpdate
if (src.getValueCount() <= 1) return src;
SolrInputField result = new SolrInputField(src.getName());
- result.setValue(StringUtils.join(src.getValues(), delimiter),
- src.getBoost());
+ result.setValue(StringUtils.join(src.getValues(), delimiter));
return result;
});
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/CountFieldValuesUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CountFieldValuesUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CountFieldValuesUpdateProcessorFactory.java
index 5ffd0f6..1ed52c0 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CountFieldValuesUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CountFieldValuesUpdateProcessorFactory.java
@@ -72,8 +72,7 @@ public final class CountFieldValuesUpdateProcessorFactory extends FieldMutatingU
UpdateRequestProcessor next) {
return mutator(getSelector(), next, src -> {
SolrInputField result = new SolrInputField(src.getName());
- result.setValue(src.getValueCount(),
- src.getBoost());
+ result.setValue(src.getValueCount());
return result;
});
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/FieldValueMutatingUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldValueMutatingUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/FieldValueMutatingUpdateProcessor.java
index 51d99f1..b08ed9b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/FieldValueMutatingUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/FieldValueMutatingUpdateProcessor.java
@@ -78,10 +78,9 @@ public abstract class FieldValueMutatingUpdateProcessor
log.debug("replace value from field '{}': {} with {}",
new Object[] { src.getName(), srcVal, destVal });
}
- result.addValue(destVal, 1.0F);
+ result.addValue(destVal);
}
}
- result.setBoost(src.getBoost());
return 0 == result.getValueCount() ? null : result;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/FieldValueSubsetUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldValueSubsetUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/FieldValueSubsetUpdateProcessorFactory.java
index 1fda07d..89c94bb 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/FieldValueSubsetUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/FieldValueSubsetUpdateProcessorFactory.java
@@ -39,8 +39,7 @@ public abstract class FieldValueSubsetUpdateProcessorFactory extends FieldMutati
if (src.getValueCount() <= 1) return src;
SolrInputField result = new SolrInputField(src.getName());
- result.setValue(pickSubset(src.getValues()),
- src.getBoost());
+ result.setValue(pickSubset(src.getValues()));
return result;
});
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java
index 2b76fbf..690c0f0 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorFactory.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
* <p>This update processor uses {@link PreAnalyzedParser}
* to parse the original field content (interpreted as a string value), and thus
* obtain the stored part and the token stream part. Then it creates the "template"
- * {@link Field}-s using the original {@link SchemaField#createFields(Object, float)}
+ * {@link Field}-s using the original {@link SchemaField#createFields(Object)}
* as declared in the current schema. Finally it sets the pre-analyzed parts if
* available (string value and the token
* stream value) on the first field of these "template" fields. If the declared
@@ -155,17 +155,16 @@ class PreAnalyzedUpdateProcessor extends FieldMutatingUpdateProcessor {
return null;
}
SolrInputField res = new SolrInputField(src.getName());
- res.setBoost(src.getBoost());
for (Object o : src) {
if (o == null) {
continue;
}
- Field pre = (Field)parser.createField(sf, o, 1.0f);
+ Field pre = (Field)parser.createField(sf, o);
if (pre != null) {
- res.addValue(pre, 1.0f);
+ res.addValue(pre);
} else { // restore the original value
log.warn("Could not parse field {} - using original value as is: {}", src.getName(), o);
- res.addValue(o, 1.0f);
+ res.addValue(o);
}
}
return res;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
index 579ccf0..f4a14db 100644
--- a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
+++ b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
@@ -506,32 +506,32 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
IndexableField luf; // Lucene field
f = ischema.getField("test_basictv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(f.storeTermVector());
assertTrue(luf.fieldType().storeTermVectors());
f = ischema.getField("test_notv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(!f.storeTermVector());
assertTrue(!luf.fieldType().storeTermVectors());
f = ischema.getField("test_postv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(f.storeTermVector() && f.storeTermPositions());
assertTrue(luf.fieldType().storeTermVectorPositions());
f = ischema.getField("test_offtv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(f.storeTermVector() && f.storeTermOffsets());
assertTrue(luf.fieldType().storeTermVectorOffsets());
f = ischema.getField("test_posofftv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets());
assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions());
f = ischema.getField("test_posoffpaytv");
- luf = f.createField("test", 0f);
+ luf = f.createField("test");
assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets() && f.storeTermPayloads());
assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions() && luf.fieldType().storeTermVectorPayloads());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java b/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java
index df8ef00..a3ca089 100644
--- a/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java
+++ b/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java
@@ -41,7 +41,6 @@ public class TestDocumentBuilder extends LuceneTestCase {
list.add(33);
list.add(20);
doc.addField("field5", list);
- doc.setDocumentBoost(5f);
SolrInputDocument clone = doc.deepCopy();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java
index 2f6c5e9..ec2dac6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java
@@ -244,7 +244,7 @@ public class TestCloudDeleteByQuery extends SolrCloudTestCase {
public static SolrInputField f(String fieldName, Object... values) {
SolrInputField f = new SolrInputField(fieldName);
- f.setValue(values, 1.0F);
+ f.setValue(values);
return f;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
index 9f29d3d..9c04f75 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
@@ -437,13 +437,13 @@ public class TestStressCloudBlindAtomicUpdates extends SolrCloudTestCase {
public static SolrInputField f(String fieldName, Object... values) {
SolrInputField f = new SolrInputField(fieldName);
- f.setValue(values, 1.0F);
+ f.setValue(values);
// TODO: soooooooooo stupid (but currently neccessary because atomic updates freak out
// if the Map with the "inc" operation is inside of a collection - even if it's the only "value") ...
if (1 == values.length) {
- f.setValue(values[0], 1.0F);
+ f.setValue(values[0]);
} else {
- f.setValue(values, 1.0F);
+ f.setValue(values);
}
return f;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index f5dead9..f66f892 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -1036,7 +1036,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
public static SolrInputField f(String fieldName, Object... values) {
SolrInputField f = new SolrInputField(fieldName);
- f.setValue(values, 1.0F);
+ f.setValue(values);
return f;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java
index 5ca51c8..4f8b7da 100644
--- a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java
@@ -52,7 +52,7 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
" },\n" +
" 'array': [ 'aaa', 'bbb' ],\n" +
" 'boosted': {\n" +
- " 'boost': 6.7,\n" +
+ " 'boost': 6.7,\n" + // make sure we still accept boosts
" 'value': [ 'aaa', 'bbb' ]\n" +
" }\n" +
" }\n" +
@@ -94,7 +94,6 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
AddUpdateCommand add = p.addCommands.get(0);
SolrInputDocument d = add.solrDoc;
SolrInputField f = d.getField( "boosted" );
- assertEquals(6.7f, f.getBoost(), 0.1);
assertEquals(2, f.getValues().size());
//
@@ -102,7 +101,6 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
d = add.solrDoc;
f = d.getField( "f1" );
assertEquals(2, f.getValues().size());
- assertEquals(3.45f, d.getDocumentBoost(), 0.001);
assertEquals(false, add.overwrite);
assertEquals(0, d.getField("f2").getValueCount());
@@ -262,7 +260,6 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
assertEquals(2, d.getFieldNames().size());
assertEquals("1", d.getFieldValue("id"));
assertEquals(new Object[] {45L, 67L, 89L} , d.getFieldValues("f").toArray());
- assertEquals(0.0F, fBoost, d.getField("f").getBoost());
d = p.addCommands.get(1).solrDoc;
assertEquals(1, d.getFieldNames().size());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java
index 7282e19..6d46722 100644
--- a/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java
@@ -76,13 +76,6 @@ public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 {
XMLLoader loader = new XMLLoader();
SolrInputDocument doc = loader.readDoc( parser );
- // Read boosts
- assertEquals( 5.5f, doc.getDocumentBoost(), 0.1);
- assertEquals( 1.0f, doc.getField( "name" ).getBoost(), 0.1);
- assertEquals( 2.2f, doc.getField( "id" ).getBoost(), 0.1);
- // Boost is the product of each value
- assertEquals( (3*4*5.0f), doc.getField( "cat" ).getBoost(), 0.1);
-
// Read values
assertEquals( "12345", doc.getField( "id" ).getValue() );
assertEquals( "kitten", doc.getField( "name").getValue() );
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/AbstractCurrencyFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/AbstractCurrencyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/AbstractCurrencyFieldTest.java
index e633a13..a195794 100644
--- a/solr/core/src/test/org/apache/solr/schema/AbstractCurrencyFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/AbstractCurrencyFieldTest.java
@@ -98,7 +98,7 @@ public abstract class AbstractCurrencyFieldTest extends SolrTestCaseJ4 {
FieldType tmp = amount.getType();
assertTrue(tmp instanceof CurrencyField);
String currencyValue = "1.50,EUR";
- List<IndexableField> fields = amount.createFields(currencyValue, 2);
+ List<IndexableField> fields = amount.createFields(currencyValue);
assertEquals(fields.size(), 3);
// First field is currency code, second is value, third is stored.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
index 9e88b6e..ac451bf 100644
--- a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
@@ -50,13 +50,13 @@ public class DateFieldTest extends SolrTestCaseJ4 {
int props = FieldProperties.INDEXED ^ FieldProperties.STORED;
SchemaField sf = new SchemaField( "test", f, props, null );
// String
- IndexableField out = f.createField(sf, "1995-12-31T23:59:59Z", 1.0f );
+ IndexableField out = f.createField(sf, "1995-12-31T23:59:59Z" );
assertEquals(820454399000L, f.toObject( out ).getTime() );
// Date obj
- out = f.createField(sf, new Date(820454399000L), 1.0f );
+ out = f.createField(sf, new Date(820454399000L) );
assertEquals(820454399000L, f.toObject( out ).getTime() );
// Date math
- out = f.createField(sf, "1995-12-31T23:59:59.99Z+5MINUTES", 1.0f);
+ out = f.createField(sf, "1995-12-31T23:59:59.99Z+5MINUTES");
assertEquals(820454699990L, f.toObject( out ).getTime() );
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
index cc3486e..cf43a68 100644
--- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
@@ -152,7 +152,7 @@ public class DocValuesTest extends SolrTestCaseJ4 {
}
private void tstToObj(SchemaField sf, Object o) {
- List<IndexableField> fields = sf.createFields(o, 1.0f);
+ List<IndexableField> fields = sf.createFields(o);
for (IndexableField field : fields) {
assertEquals( sf.getType().toObject(field), o);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
index f788ba0..6839c70 100644
--- a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
@@ -83,7 +83,7 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
assertEquals(pt.getDimension(), 2);
double[] xy = new double[]{35.0, -79.34};
String point = xy[0] + "," + xy[1];
- List<IndexableField> fields = home.createFields(point, 2);
+ List<IndexableField> fields = home.createFields(point);
assertEquals(fields.size(), 3);//should be 3, we have a stored field
//first two fields contain the values, third is just stored and contains the original
for (int i = 0; i < 3; i++) {
@@ -99,13 +99,13 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
home = schema.getField("home_ns");
assertNotNull(home);
- fields = home.createFields(point, 2);
+ fields = home.createFields(point);
assertEquals(fields.size(), 2);//should be 2, since we aren't storing
home = schema.getField("home_ns");
assertNotNull(home);
try {
- fields = home.createFields("35.0,foo", 2);
+ fields = home.createFields("35.0,foo");
assertTrue(false);
} catch (Exception e) {
//
@@ -115,7 +115,7 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
SchemaField s1 = schema.getField("test_p");
SchemaField s2 = schema.getField("test_p");
// If we use [Int/Double/Long/Float]PointField, we can't get the valueSource, since docValues is false
- if (s1.createFields("1,2", 0).get(0).fieldType().pointDimensionCount() == 0) {
+ if (s1.createFields("1,2").get(0).fieldType().pointDimensionCount() == 0) {
assertFalse(s2.getType().isPointField());
ValueSource v1 = s1.getType().getValueSource(s1, null);
ValueSource v2 = s2.getType().getValueSource(s2, null);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java
index 622a634..a494654 100644
--- a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java
@@ -105,7 +105,7 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 {
for (int i = 0; i < valid.length; i++) {
String s = valid[i];
try {
- Field f = (Field)paf.fromString(field, s, 1.0f);
+ Field f = (Field)paf.fromString(field, s);
//System.out.println(" - toString: '" + sb.toString() + "'");
assertEquals(validParsed[i], parser.toFormattedString(f));
} catch (Exception e) {
@@ -179,7 +179,7 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 {
paf.init(h.getCore().getLatestSchema(), Collections.<String,String>emptyMap());
for (String s : invalidSimple) {
try {
- paf.fromString(field, s, 1.0f);
+ paf.fromString(field, s);
fail("should fail: '" + s + "'");
} catch (Exception e) {
//
@@ -232,7 +232,7 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 {
args.put(PreAnalyzedField.PARSER_IMPL, SimplePreAnalyzedParser.class.getName());
paf.init(h.getCore().getLatestSchema(), args);
try {
- Field f = (Field)paf.fromString(field, valid[0], 1.0f);
+ Field f = (Field)paf.fromString(field, valid[0]);
} catch (Exception e) {
fail("Should pass: '" + valid[0] + "', exception: " + e);
}
@@ -240,14 +240,14 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 {
args.put(PreAnalyzedField.PARSER_IMPL, JsonPreAnalyzedParser.class.getName());
paf.init(h.getCore().getLatestSchema(), args);
try {
- Field f = (Field)paf.fromString(field, valid[0], 1.0f);
+ Field f = (Field)paf.fromString(field, valid[0]);
fail("Should fail JSON parsing: '" + valid[0] + "'");
} catch (Exception e) {
}
byte[] deadbeef = new byte[]{(byte)0xd, (byte)0xe, (byte)0xa, (byte)0xd, (byte)0xb, (byte)0xe, (byte)0xe, (byte)0xf};
PreAnalyzedParser parser = new JsonPreAnalyzedParser();
try {
- Field f = (Field)paf.fromString(field, jsonValid, 1.0f);
+ Field f = (Field)paf.fromString(field, jsonValid);
assertEquals(jsonValid, parser.toFormattedString(f));
} catch (Exception e) {
fail("Should pass: '" + jsonValid + "', exception: " + e);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java b/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
index 1ebf4cc..b8ed296 100644
--- a/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
+++ b/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
@@ -40,10 +40,10 @@ public class SortableBinaryField extends BinaryField {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object value) {
if (field.hasDocValues()) {
List<IndexableField> fields = new ArrayList<>();
- IndexableField storedField = createField(field, value, boost);
+ IndexableField storedField = createField(field, value);
fields.add(storedField);
ByteBuffer byteBuffer = toObject(storedField);
BytesRef bytes = new BytesRef
@@ -55,7 +55,7 @@ public class SortableBinaryField extends BinaryField {
}
return fields;
} else {
- return Collections.singletonList(createField(field, value, boost));
+ return Collections.singletonList(createField(field, value));
}
}
[47/50] [abbrv] lucene-solr:jira/solr-6736: remove stale comment
Posted by is...@apache.org.
remove stale comment
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a3f48963
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a3f48963
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a3f48963
Branch: refs/heads/jira/solr-6736
Commit: a3f4896359bd0a113eacb0756ec2afe6c8d5d7b9
Parents: 0fb386a
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Mar 11 06:41:49 2017 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Mar 11 06:41:49 2017 -0500
----------------------------------------------------------------------
lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java | 1 -
1 file changed, 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a3f48963/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
index 2ea0d0e..b1f507a 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
@@ -183,7 +183,6 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
private BooleanQuery rewriteNoScoring() {
BooleanQuery.Builder newQuery = new BooleanQuery.Builder();
- // ignore disableCoord, which only matters for scores
newQuery.setMinimumNumberShouldMatch(getMinimumNumberShouldMatch());
for (BooleanClause clause : clauses) {
if (clause.getOccur() == Occur.MUST) {
[48/50] [abbrv] lucene-solr:jira/solr-6736: Merge branch 'master'
into jira/solr-6736
Posted by is...@apache.org.
Merge branch 'master' into jira/solr-6736
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/10c2bebc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/10c2bebc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/10c2bebc
Branch: refs/heads/jira/solr-6736
Commit: 10c2bebc9f2c60d5202592c7c49b25e609c49e9b
Parents: ed99a21 8756be0
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Sun Mar 12 05:39:18 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Sun Mar 12 05:39:18 2017 +0530
----------------------------------------------------------------------
build.xml | 10 +-
dev-tools/doap/lucene.rdf | 7 +
dev-tools/doap/solr.rdf | 7 +
.../maven/solr/contrib/ltr/pom.xml.template | 10 +-
dev-tools/maven/solr/pom.xml.template | 9 -
dev-tools/scripts/buildAndPushRelease.py | 2 +-
lucene/CHANGES.txt | 48 ++
lucene/MIGRATE.txt | 14 +
.../lucene/analysis/shingle/ShingleFilter.java | 7 +-
.../analysis/shingle/ShingleFilterTest.java | 94 +++-
.../apache/lucene/index/FixBrokenOffsets.java | 10 +
.../index/TestBackwardsCompatibility.java | 53 +-
.../lucene/index/TestFixBrokenOffsets.java | 7 +-
.../lucene/index/TestIndexWriterOnOldIndex.java | 55 ++
.../org/apache/lucene/index/index.6.4.2-cfs.zip | Bin 0 -> 15856 bytes
.../apache/lucene/index/index.6.4.2-nocfs.zip | Bin 0 -> 15886 bytes
.../lucene/index/index.single-empty-doc.630.zip | Bin 0 -> 1363 bytes
.../apache/lucene/legacy/TestLegacyField.java | 10 -
lucene/build.xml | 6 +-
lucene/common-build.xml | 65 ++-
.../apache/lucene/codecs/DocValuesConsumer.java | 4 +-
.../lucene/codecs/StoredFieldsWriter.java | 5 -
.../codecs/blocktree/BlockTreeTermsReader.java | 30 +-
.../blocktree/IntersectTermsEnumFrame.java | 70 +--
.../codecs/blocktree/SegmentTermsEnumFrame.java | 154 ++---
.../CompressingStoredFieldsReader.java | 19 +-
.../CompressingStoredFieldsWriter.java | 5 +-
.../CompressingTermVectorsReader.java | 19 +-
.../CompressingTermVectorsWriter.java | 5 +-
.../java/org/apache/lucene/document/Field.java | 32 --
.../lucene/index/DefaultIndexingChain.java | 8 -
.../lucene/index/ExitableDirectoryReader.java | 21 +-
.../apache/lucene/index/FieldInvertState.java | 20 +-
.../apache/lucene/index/FilterCodecReader.java | 13 +-
.../apache/lucene/index/FilterLeafReader.java | 73 +--
.../org/apache/lucene/index/IndexReader.java | 115 ++--
.../org/apache/lucene/index/IndexWriter.java | 20 +-
.../org/apache/lucene/index/IndexableField.java | 24 -
.../org/apache/lucene/index/LeafReader.java | 84 +--
.../apache/lucene/index/MergeReaderWrapper.java | 20 +-
.../org/apache/lucene/index/MultiDocValues.java | 22 +-
.../org/apache/lucene/index/MultiReader.java | 11 +
.../lucene/index/ParallelCompositeReader.java | 16 +-
.../apache/lucene/index/ParallelLeafReader.java | 36 +-
.../apache/lucene/index/SegmentCoreReaders.java | 39 +-
.../org/apache/lucene/index/SegmentInfos.java | 77 ++-
.../org/apache/lucene/index/SegmentReader.java | 57 +-
.../lucene/index/SlowCodecReaderWrapper.java | 8 +-
.../apache/lucene/index/SortingLeafReader.java | 12 +
.../index/SortingStoredFieldsConsumer.java | 5 -
.../lucene/index/StandardDirectoryReader.java | 42 ++
.../java/org/apache/lucene/search/FieldDoc.java | 6 +-
.../org/apache/lucene/search/IndexSearcher.java | 4 +-
.../org/apache/lucene/search/LRUQueryCache.java | 39 +-
.../lucene/search/MultiLeafFieldComparator.java | 92 +++
.../java/org/apache/lucene/search/ScoreDoc.java | 2 +-
.../java/org/apache/lucene/search/TopDocs.java | 124 ++--
.../apache/lucene/search/TopFieldCollector.java | 212 ++-----
.../org/apache/lucene/search/package-info.java | 25 +-
.../search/similarities/BM25Similarity.java | 12 +-
.../search/similarities/ClassicSimilarity.java | 5 +-
.../search/similarities/SimilarityBase.java | 8 +-
.../search/similarities/TFIDFSimilarity.java | 46 +-
.../lucene/search/spans/NearSpansUnordered.java | 211 ++-----
.../java/org/apache/lucene/util/Version.java | 7 +
.../lucene/util/packed/BlockPackedReader.java | 3 +-
.../org/apache/lucene/document/TestField.java | 30 -
.../apache/lucene/index/TestCustomNorms.java | 14 +-
.../index/TestDemoParallelLeafReader.java | 11 +-
.../lucene/index/TestDirectoryReader.java | 8 +-
.../lucene/index/TestDirectoryReaderReopen.java | 12 +-
.../index/TestExitableDirectoryReader.java | 10 +
.../org/apache/lucene/index/TestFieldReuse.java | 5 -
.../lucene/index/TestFilterDirectoryReader.java | 5 +
.../lucene/index/TestFilterLeafReader.java | 21 +-
.../lucene/index/TestIndexReaderClose.java | 62 +-
.../apache/lucene/index/TestIndexSorting.java | 8 +-
.../apache/lucene/index/TestIndexWriter.java | 11 +-
.../lucene/index/TestIndexWriterExceptions.java | 66 ---
.../apache/lucene/index/TestIndexableField.java | 10 -
.../apache/lucene/index/TestMultiTermsEnum.java | 10 +
.../test/org/apache/lucene/index/TestNorms.java | 13 +-
.../org/apache/lucene/index/TestOmitTf.java | 2 +-
.../index/TestParallelCompositeReader.java | 33 +-
.../apache/lucene/index/TestReadOnlyIndex.java | 2 +-
.../apache/lucene/index/TestReaderClosed.java | 8 +-
.../apache/lucene/index/TestSegmentInfos.java | 11 +-
.../lucene/search/TermInSetQueryTest.java | 17 +-
.../lucene/search/TestDisjunctionMaxQuery.java | 2 +-
.../org/apache/lucene/search/TestDocBoost.java | 98 ----
.../apache/lucene/search/TestFuzzyQuery.java | 5 +-
.../apache/lucene/search/TestLRUQueryCache.java | 61 +-
.../lucene/search/TestMatchAllDocsQuery.java | 9 +-
.../lucene/search/TestSearcherManager.java | 15 +
.../apache/lucene/search/TestSimilarity.java | 2 +-
.../org/apache/lucene/search/TestTermQuery.java | 15 +
.../apache/lucene/search/TestTermScorer.java | 10 +
.../apache/lucene/search/TestTopDocsMerge.java | 81 ++-
.../similarities/TestBooleanSimilarity.java | 3 +-
.../search/similarities/TestSimilarityBase.java | 1 -
.../apache/lucene/expressions/Expression.java | 24 +-
.../DefaultSortedSetDocValuesReaderState.java | 3 +-
.../facet/taxonomy/CachedOrdinalsReader.java | 7 +-
.../taxonomy/OrdinalMappingLeafReader.java | 10 +
.../search/highlight/TermVectorLeafReader.java | 20 +-
.../highlight/WeightedSpanTermExtractor.java | 10 +
.../MultiTermHighlighting.java | 20 +-
.../uhighlight/MultiTermHighlighting.java | 20 +-
.../lucene/search/uhighlight/PhraseHelper.java | 10 +
.../TermVectorFilteredLeafReader.java | 10 +
.../search/uhighlight/UnifiedHighlighter.java | 15 +
.../uhighlight/TestUnifiedHighlighterMTQ.java | 30 +-
.../TestUnifiedHighlighterTermVec.java | 15 +
.../lucene/search/join/QueryBitSetProducer.java | 14 +-
.../apache/lucene/search/join/TestJoinUtil.java | 10 +-
.../search/join/TestQueryBitSetProducer.java | 110 ++++
.../apache/lucene/index/memory/MemoryIndex.java | 92 +--
.../lucene/index/memory/TestMemoryIndex.java | 8 +-
.../memory/TestMemoryIndexAgainstRAMDir.java | 5 +-
.../apache/lucene/document/LazyDocument.java | 5 -
.../org/apache/lucene/index/IndexSplitter.java | 4 +-
.../lucene/index/MultiPassIndexSplitter.java | 15 +
.../apache/lucene/index/PKIndexSplitter.java | 10 +
.../apache/lucene/misc/SweetSpotSimilarity.java | 2 +-
.../lucene/misc/SweetSpotSimilarityTest.java | 8 -
.../apache/lucene/queries/BoostingQuery.java | 2 +-
.../function/TestLongNormValueSource.java | 3 +-
.../queries/payloads/TestPayloadScoreQuery.java | 2 +-
.../queries/payloads/TestPayloadTermQuery.java | 2 +-
.../complexPhrase/ComplexPhraseQueryParser.java | 21 +-
.../queryparser/simple/SimpleQueryParser.java | 8 +-
.../complexPhrase/TestComplexPhraseQuery.java | 36 +-
.../simple/TestSimpleQueryParser.java | 2 +-
.../lucene/replicator/nrt/ReplicaNode.java | 3 +-
.../nrt/SegmentInfosSearcherManager.java | 8 +-
.../lucene/index/AllDeletedFilterReader.java | 10 +
.../lucene/index/AssertingDirectoryReader.java | 9 +-
.../lucene/index/AssertingLeafReader.java | 30 +-
.../index/BaseStoredFieldsFormatTestCase.java | 15 +
.../lucene/index/FieldFilterLeafReader.java | 12 +-
.../lucene/index/MismatchedDirectoryReader.java | 5 +
.../lucene/index/MismatchedLeafReader.java | 10 +
.../lucene/index/MockRandomMergePolicy.java | 13 +-
.../lucene/index/OwnCacheKeyMultiReader.java | 76 +++
.../lucene/search/BaseExplanationTestCase.java | 1 -
.../org/apache/lucene/search/QueryUtils.java | 43 +-
.../org/apache/lucene/util/LuceneTestCase.java | 30 +-
solr/CHANGES.txt | 84 ++-
solr/bin/install_solr_service.sh | 2 +-
solr/build.xml | 9 +-
solr/common-build.xml | 9 +-
.../apache/solr/schema/ICUCollationField.java | 6 +-
.../solr/handler/dataimport/DocBuilder.java | 24 +-
.../handler/dataimport/config/EntityField.java | 6 -
.../handler/dataimport/TestJdbcDataSource.java | 80 ++-
.../handler/extraction/ExtractingParams.java | 13 -
.../handler/extraction/SolrContentHandler.java | 17 +-
.../ExtractingRequestHandlerTest.java | 4 +-
.../LanguageIdentifierUpdateProcessor.java | 2 +-
.../test/org/apache/solr/hadoop/MRUnitBase.java | 2 -
.../MapReduceIndexerToolArgumentParserTest.java | 1 -
.../solr/hadoop/MorphlineBasicMiniMRTest.java | 1 -
.../morphlines/cell/SolrCellMorphlineTest.java | 2 -
.../solr/morphlines/solr/LoadSolrBuilder.java | 32 +-
.../test-morphlines/loadSolrBasic.conf | 7 +-
.../test-morphlines/solrCellDocumentTypes.conf | 23 +-
.../test-morphlines/solrCellJPGCompressed.conf | 17 +-
.../test-files/test-morphlines/solrCellXML.conf | 11 +-
.../test-morphlines/tokenizeText.conf | 6 +-
.../tutorialReadAvroContainer.conf | 11 +-
.../solr/AbstractSolrMorphlineTestBase.java | 2 -
.../solr/AbstractSolrMorphlineZkTestBase.java | 4 -
.../solr/uima/processor/UIMAToSolrMapper.java | 2 +-
.../solrj/embedded/EmbeddedSolrServer.java | 55 +-
.../org/apache/solr/cloud/ElectionContext.java | 13 +-
.../java/org/apache/solr/cloud/Overseer.java | 7 +-
.../solr/cloud/OverseerNodePrioritizer.java | 2 +-
.../solr/cloud/OverseerTaskProcessor.java | 6 +-
.../org/apache/solr/cloud/ZkController.java | 2 +-
.../org/apache/solr/core/CoreContainer.java | 30 +-
.../org/apache/solr/core/DirectoryFactory.java | 9 +-
.../org/apache/solr/core/JmxMonitoredMap.java | 9 +-
.../apache/solr/core/MMapDirectoryFactory.java | 4 +
.../solr/core/MetricsDirectoryFactory.java | 537 ------------------
.../src/java/org/apache/solr/core/SolrCore.java | 20 +-
.../apache/solr/core/SolrDeletionPolicy.java | 6 -
.../org/apache/solr/core/SolrInfoMBean.java | 4 +-
.../org/apache/solr/core/SolrXmlConfig.java | 3 +-
.../handler/DocumentAnalysisRequestHandler.java | 4 +-
.../org/apache/solr/handler/ExportWriter.java | 2 +-
.../apache/solr/handler/RequestHandlerBase.java | 7 +-
.../solr/handler/admin/LukeRequestHandler.java | 1 -
.../handler/admin/MetricsCollectorHandler.java | 228 ++++++++
.../solr/handler/admin/MetricsHandler.java | 2 +-
.../solr/handler/component/ExpandComponent.java | 21 +-
.../solr/handler/component/QueryComponent.java | 1 -
.../handler/component/RangeFacetRequest.java | 5 +-
.../component/SortedDateStatsValues.java | 89 +++
.../handler/component/StatsValuesFactory.java | 8 +-
.../solr/handler/loader/CSVLoaderBase.java | 4 +-
.../apache/solr/handler/loader/JsonLoader.java | 29 +-
.../apache/solr/handler/loader/XMLLoader.java | 23 +-
.../org/apache/solr/handler/sql/SolrRules.java | 14 +
.../apache/solr/handler/sql/SolrTableScan.java | 4 +
.../solr/highlight/DefaultSolrHighlighter.java | 10 +
.../solr/highlight/PostingsSolrHighlighter.java | 35 +-
.../solr/highlight/UnifiedSolrHighlighter.java | 28 +-
.../solr/index/SlowCompositeReaderWrapper.java | 35 +-
.../apache/solr/metrics/AggregateMetric.java | 200 +++++++
.../solr/metrics/SolrCoreMetricManager.java | 125 ++++-
.../apache/solr/metrics/SolrMetricManager.java | 325 ++++++++++-
.../metrics/reporters/JmxObjectNameFactory.java | 6 +-
.../reporters/solr/SolrClusterReporter.java | 277 +++++++++
.../metrics/reporters/solr/SolrReporter.java | 392 +++++++++++++
.../reporters/solr/SolrShardReporter.java | 188 +++++++
.../metrics/reporters/solr/package-info.java | 22 +
.../org/apache/solr/request/NumericFacets.java | 4 +-
.../org/apache/solr/request/SimpleFacets.java | 10 +-
.../solr/request/SolrQueryRequestBase.java | 17 +-
.../org/apache/solr/response/DocsStreamer.java | 2 +
.../solr/schema/AbstractSpatialFieldType.java | 6 +-
.../java/org/apache/solr/schema/BBoxField.java | 2 +-
.../org/apache/solr/schema/BinaryField.java | 7 +-
.../java/org/apache/solr/schema/BoolField.java | 4 +-
.../org/apache/solr/schema/CollationField.java | 6 +-
.../org/apache/solr/schema/CurrencyField.java | 8 +-
.../org/apache/solr/schema/DatePointField.java | 218 +++++++
.../org/apache/solr/schema/DateRangeField.java | 4 +-
.../apache/solr/schema/DoublePointField.java | 5 +-
.../java/org/apache/solr/schema/EnumField.java | 14 +-
.../java/org/apache/solr/schema/FieldType.java | 26 +-
.../org/apache/solr/schema/FloatPointField.java | 5 +-
.../org/apache/solr/schema/IntPointField.java | 5 +-
.../java/org/apache/solr/schema/LatLonType.java | 10 +-
.../org/apache/solr/schema/LongPointField.java | 5 +-
.../java/org/apache/solr/schema/PointField.java | 6 +-
.../java/org/apache/solr/schema/PointType.java | 8 +-
.../apache/solr/schema/PreAnalyzedField.java | 10 +-
.../schema/RptWithGeometrySpatialField.java | 7 +-
.../org/apache/solr/schema/SchemaField.java | 10 +-
.../java/org/apache/solr/schema/StrField.java | 4 +-
.../java/org/apache/solr/schema/TrieField.java | 9 +-
.../solr/search/CollapsingQParserPlugin.java | 19 +-
.../java/org/apache/solr/search/Grouping.java | 2 +-
.../java/org/apache/solr/search/Insanity.java | 9 +-
.../apache/solr/search/SolrIndexSearcher.java | 5 +-
.../GroupedEndResultTransformer.java | 2 +-
.../solr/security/PKIAuthenticationPlugin.java | 2 +-
.../apache/solr/servlet/SolrRequestParsers.java | 11 +-
.../solr/store/blockcache/BlockCache.java | 15 +-
.../store/blockcache/BlockDirectoryCache.java | 5 -
.../apache/solr/store/blockcache/Metrics.java | 121 ++--
.../org/apache/solr/uninverting/FieldCache.java | 18 +-
.../apache/solr/uninverting/FieldCacheImpl.java | 70 +--
.../uninverting/FieldCacheSanityChecker.java | 426 --------------
.../solr/uninverting/UninvertingReader.java | 21 +-
.../solr/update/DirectUpdateHandler2.java | 128 +++--
.../org/apache/solr/update/DocumentBuilder.java | 41 +-
.../java/org/apache/solr/update/PeerSync.java | 8 +-
.../apache/solr/update/SolrIndexSplitter.java | 10 +
...aluesOrNoneFieldMutatingUpdateProcessor.java | 3 +-
.../processor/AtomicUpdateDocumentMerger.java | 8 +-
.../CloneFieldUpdateProcessorFactory.java | 3 +-
.../ConcatFieldUpdateProcessorFactory.java | 3 +-
.../CountFieldValuesUpdateProcessorFactory.java | 3 +-
.../FieldValueMutatingUpdateProcessor.java | 3 +-
.../FieldValueSubsetUpdateProcessorFactory.java | 3 +-
.../PreAnalyzedUpdateProcessorFactory.java | 9 +-
.../org/apache/solr/util/stats/MetricUtils.java | 267 ++++++---
.../conf/schema-docValuesFaceting.xml | 5 +-
.../solr/collection1/conf/schema-point.xml | 11 +
.../solr/collection1/conf/schema-sorts.xml | 11 +-
.../test-files/solr/collection1/conf/schema.xml | 11 +-
.../solr/collection1/conf/schema11.xml | 3 +-
.../solr/collection1/conf/schema12.xml | 9 +-
.../solr/collection1/conf/schema_latest.xml | 13 +-
.../conf/solrconfig-indexmetrics.xml | 2 -
...lrconfig-parsing-update-processor-chains.xml | 7 +
.../conf/solrconfig-update-processor-chains.xml | 2 +
.../src/test-files/solr/solr-solrreporter.xml | 66 +++
.../org/apache/solr/BasicFunctionalityTest.java | 12 +-
.../org/apache/solr/TestDocumentBuilder.java | 1 -
.../TestEmbeddedSolrServerSchemaAPI.java | 111 ++++
.../apache/solr/cloud/CleanupOldIndexTest.java | 18 +-
.../solr/cloud/TestCloudDeleteByQuery.java | 2 +-
.../apache/solr/cloud/TestCloudRecovery.java | 6 +-
.../TestStressCloudBlindAtomicUpdates.java | 6 +-
.../cloud/TestTolerantUpdateProcessorCloud.java | 2 +-
.../HdfsWriteToMultipleCollectionsTest.java | 7 +-
.../org/apache/solr/core/TestCoreDiscovery.java | 4 +-
.../apache/solr/core/TestJmxMonitoredMap.java | 2 +-
.../test/org/apache/solr/core/TestNRTOpen.java | 2 +-
.../org/apache/solr/handler/JsonLoaderTest.java | 5 +-
.../solr/handler/TestReplicationHandler.java | 9 +-
.../org/apache/solr/handler/TestSQLHandler.java | 12 +
.../handler/XmlUpdateRequestHandlerTest.java | 7 -
.../admin/CoreMergeIndexesAdminHandlerTest.java | 8 +-
.../handler/admin/LukeRequestHandlerTest.java | 4 +-
.../highlight/TestPostingsSolrHighlighter.java | 13 +
.../highlight/TestUnifiedSolrHighlighter.java | 13 +
.../index/TestSlowCompositeReaderWrapper.java | 54 +-
.../solr/metrics/SolrCoreMetricManagerTest.java | 31 +-
.../solr/metrics/SolrMetricManagerTest.java | 34 +-
.../metrics/SolrMetricsIntegrationTest.java | 15 +-
.../metrics/reporters/SolrJmxReporterTest.java | 13 +-
.../reporters/solr/SolrCloudReportersTest.java | 163 ++++++
.../reporters/solr/SolrShardReporterTest.java | 117 ++++
.../solr/schema/AbstractCurrencyFieldTest.java | 2 +-
.../org/apache/solr/schema/DateFieldTest.java | 16 +-
.../org/apache/solr/schema/DocValuesTest.java | 2 +-
.../org/apache/solr/schema/PolyFieldTest.java | 8 +-
.../solr/schema/PreAnalyzedFieldTest.java | 10 +-
.../apache/solr/schema/SortableBinaryField.java | 6 +-
.../org/apache/solr/schema/TestPointFields.java | 561 ++++++++++++++++++-
.../schema/TestSchemalessBufferedUpdates.java | 2 +-
.../test/org/apache/solr/search/TestDocSet.java | 20 +-
.../apache/solr/search/TestSolr4Spatial2.java | 2 +-
.../apache/solr/search/TestSolrQueryParser.java | 12 +-
.../solr/search/function/TestFunctionQuery.java | 1 -
.../security/TestPKIAuthenticationPlugin.java | 10 +-
.../solr/store/blockcache/BlockCacheTest.java | 36 +-
.../solr/uninverting/TestDocTermOrds.java | 4 +-
.../apache/solr/uninverting/TestFieldCache.java | 4 +-
.../TestFieldCacheSanityChecker.java | 164 ------
.../solr/uninverting/TestLegacyFieldCache.java | 35 +-
.../solr/update/DirectUpdateHandlerTest.java | 108 +++-
.../apache/solr/update/DocumentBuilderTest.java | 317 +----------
.../solr/update/SolrIndexMetricsTest.java | 44 --
.../solr/update/TestInPlaceUpdatesDistrib.java | 21 +-
.../org/apache/solr/update/UpdateLogTest.java | 7 +-
.../update/processor/AtomicUpdatesTest.java | 25 +-
.../CloneFieldUpdateProcessorFactoryTest.java | 14 +-
.../DefaultValueUpdateProcessorTest.java | 7 +-
.../FieldMutatingUpdateProcessorTest.java | 14 +-
.../ParsingFieldUpdateProcessorsTest.java | 10 +-
.../processor/TolerantUpdateProcessorTest.java | 16 +-
.../UUIDUpdateProcessorFallbackTest.java | 3 +-
.../processor/UpdateProcessorTestBase.java | 7 +-
.../apache/solr/util/stats/MetricUtilsTest.java | 54 +-
solr/scripts/README.txt | 13 -
solr/scripts/abc | 159 ------
solr/scripts/abo | 158 ------
solr/scripts/backup | 109 ----
solr/scripts/backupcleaner | 134 -----
solr/scripts/commit | 109 ----
solr/scripts/optimize | 109 ----
solr/scripts/rsyncd-disable | 77 ---
solr/scripts/rsyncd-enable | 76 ---
solr/scripts/rsyncd-start | 147 -----
solr/scripts/rsyncd-stop | 105 ----
solr/scripts/scripts-util | 141 -----
solr/scripts/snapcleaner | 146 -----
solr/scripts/snapinstaller | 190 -------
solr/scripts/snappuller | 261 ---------
solr/scripts/snappuller-disable | 77 ---
solr/scripts/snappuller-enable | 77 ---
solr/scripts/snapshooter | 128 -----
solr/site/quickstart.mdtext | 2 -
.../solrj/beans/DocumentObjectBinder.java | 4 +-
.../client/solrj/impl/BinaryRequestWriter.java | 4 +-
.../solr/client/solrj/impl/CloudSolrClient.java | 3 +
.../solr/client/solrj/io/SolrClientCache.java | 26 +-
.../solrj/io/stream/ScoreNodesStream.java | 2 +-
.../solrj/io/stream/SignificantTermsStream.java | 2 +-
.../request/JavaBinUpdateRequestCodec.java | 29 +-
.../solr/client/solrj/util/ClientUtils.java | 32 +-
.../apache/solr/common/SolrInputDocument.java | 53 +-
.../org/apache/solr/common/SolrInputField.java | 28 +-
.../solr/common/params/HighlightParams.java | 1 +
.../apache/solr/common/util/JavaBinCodec.java | 34 +-
.../solr/client/solrj/SolrExampleTests.java | 76 +--
.../solr/client/solrj/SolrExampleTestsBase.java | 14 +-
.../client/solrj/impl/CloudSolrClientTest.java | 25 +
.../solrj/io/graph/GraphExpressionTest.java | 11 +-
.../solr/client/solrj/io/sql/JdbcTest.java | 19 +
.../client/solrj/request/TestCoreAdmin.java | 4 +-
.../solrj/request/TestUpdateRequestCodec.java | 20 +-
.../apache/solr/common/SolrDocumentTest.java | 6 +-
.../java/org/apache/solr/SolrTestCaseJ4.java | 86 +--
solr/webapp/web/css/angular/collections.css | 17 +
.../web/js/angular/controllers/collections.js | 14 +
solr/webapp/web/js/angular/services.js | 1 +
solr/webapp/web/partials/collections.html | 18 +-
383 files changed, 6928 insertions(+), 6733 deletions(-)
----------------------------------------------------------------------
[28/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10235: Fix DIH's
TestJdbcDataSource to work with Java 9 and other Java runtimes that do not
use the same DriverManager implementation like Oracle's original one
Posted by is...@apache.org.
SOLR-10235: Fix DIH's TestJdbcDataSource to work with Java 9 and other Java runtimes that do not use the same DriverManager implementation like Oracle's original one
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0d2c0278
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0d2c0278
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0d2c0278
Branch: refs/heads/jira/solr-6736
Commit: 0d2c027857bfca3486399b0e6b19a5887081287a
Parents: 2d51a42
Author: Uwe Schindler <us...@apache.org>
Authored: Tue Mar 7 19:01:15 2017 +0100
Committer: Uwe Schindler <us...@apache.org>
Committed: Tue Mar 7 19:01:15 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 6 ++
.../handler/dataimport/TestJdbcDataSource.java | 78 +++++++++++++++++---
2 files changed, 73 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0d2c0278/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5b0eb03..dc97456 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -300,6 +300,12 @@ Other Changes
* SOLR-10230: default TTL of PKIAuthenticationPlugin increased to 10secs (noble)
+* SOLR-10235: Fix DIH's TestJdbcDataSource to work with Java 9 and other Java runtimes that
+ do not use the same DriverManager implementation like Oracle's original one. The test now
+ uses a fully implemented Driver instance returning a mock connection. The test also works
+ correct now if other drivers were installed before test execution (e.g., through IDE).
+ (hossman, Uwe Schindler)
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0d2c0278/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
index 2f6b24c..dcb4dbc 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
@@ -35,7 +35,6 @@ import java.util.Properties;
import javax.sql.DataSource;
-import org.apache.lucene.util.Constants;
import org.apache.solr.handler.dataimport.JdbcDataSource.ResultSetIterator;
import static org.mockito.Mockito.*;
import org.junit.After;
@@ -485,17 +484,16 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
@Test
public void testRetrieveFromDriverManager() throws Exception {
- assumeFalse("In Java 9, Class.forName() does not work for mock classes", Constants.JRE_IS_MINIMUM_JAVA9);
- DriverManager.registerDriver(driver);
+ // we're not (directly) using a Mockito based mock class here because it won't have a consistent class name
+ // that will work with DriverManager's class bindings
+ MockDriver mockDriver = new MockDriver(connection);
+ DriverManager.registerDriver(mockDriver);
try {
- when(driver.connect(notNull(),notNull())).thenReturn(connection);
-
- props.put(JdbcDataSource.DRIVER, driver.getClass().getName());
- props.put(JdbcDataSource.URL, "jdbc:fakedb");
+ props.put(JdbcDataSource.DRIVER, MockDriver.class.getName());
+ props.put(JdbcDataSource.URL, MockDriver.MY_JDBC_URL);
props.put("holdability", "HOLD_CURSORS_OVER_COMMIT");
- Connection conn = jdbcDataSource.createConnectionFactory(context, props)
- .call();
+ Connection conn = jdbcDataSource.createConnectionFactory(context, props).call();
verify(connection).setAutoCommit(false);
verify(connection).setHoldability(1);
@@ -504,7 +502,7 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
} catch(Exception e) {
throw e;
} finally {
- DriverManager.deregisterDriver(driver);
+ DriverManager.deregisterDriver(mockDriver);
}
}
@@ -594,5 +592,63 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
byte[] content = "secret".getBytes(StandardCharsets.UTF_8);
createFile(tmpdir, "enckeyfile.txt", content, false);
return new File(tmpdir, "enckeyfile.txt").getAbsolutePath();
- }
+ }
+
+ /**
+ * A stub driver that returns our mocked connection for connection URL {@link #MY_JDBC_URL}.
+ * <p>
+ * This class is used instead of a Mockito mock because {@link DriverManager} uses the class
+ * name to lookup the driver and also requires the driver to behave in a sane way, if other
+ * drivers are registered in the runtime. A simple Mockito mock is likely to break
+ * depending on JVM runtime version. So this class implements a full {@link Driver},
+ * so {@code DriverManager} can do whatever it wants to find the correct driver for a URL.
+ */
+ public static final class MockDriver implements Driver {
+ public static final String MY_JDBC_URL = "jdbc:fakedb";
+ private final Connection conn;
+
+ public MockDriver() throws SQLException {
+ throw new AssertionError("The driver should never be directly instantiated by DIH's JdbcDataSource");
+ }
+
+ MockDriver(Connection conn) throws SQLException {
+ this.conn = conn;
+ }
+
+ @Override
+ public boolean acceptsURL(String url) throws java.sql.SQLException {
+ return MY_JDBC_URL.equals(url);
+ }
+
+ @Override
+ public Connection connect(String url, Properties info) throws java.sql.SQLException {
+ return acceptsURL(url) ? conn : null;
+ }
+
+ @Override
+ public int getMajorVersion() {
+ return 1;
+ }
+
+ @Override
+ public int getMinorVersion() {
+ return 0;
+ }
+
+ @Override
+ public java.util.logging.Logger getParentLogger() throws java.sql.SQLFeatureNotSupportedException {
+ throw new java.sql.SQLFeatureNotSupportedException();
+ }
+
+ @Override
+ public java.sql.DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
+ return new java.sql.DriverPropertyInfo[0];
+ }
+
+ @Override
+ public boolean jdbcCompliant() {
+ // we are not fully compliant:
+ return false;
+ }
+ }
}
[06/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10155: For numeric
types facet.contains= and facet.prefix= are now rejected. (Gus Heck,
Christine Poerschke)
Posted by is...@apache.org.
SOLR-10155: For numeric types facet.contains= and facet.prefix= are now rejected.
(Gus Heck, Christine Poerschke)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/43474312
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/43474312
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/43474312
Branch: refs/heads/jira/solr-6736
Commit: 43474312eb2b66df4102bd653b9546e7eff47363
Parents: da113fd
Author: Christine Poerschke <cp...@apache.org>
Authored: Fri Mar 3 12:03:41 2017 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Fri Mar 3 12:03:41 2017 +0000
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../src/java/org/apache/solr/request/SimpleFacets.java | 10 ++++------
2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/43474312/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 09912ee..fa4d33b 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -271,6 +271,8 @@ Other Changes
* SOLR-10219: re-enable HDFS tests under JDK9 (hossman, Uwe Schindler)
+* SOLR-10155: For numeric types facet.contains= and facet.prefix= are now rejected.
+ (Gus Heck, Christine Poerschke)
================== 6.4.2 ==================
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/43474312/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 4190958..5370dd9 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -484,15 +484,13 @@ public class SimpleFacets {
case FCS:
assert ft.isPointField() || !multiToken;
if (ft.isPointField() || (ft.getNumberType() != null && !sf.multiValued())) {
- // force numeric faceting
- if (prefix != null && !prefix.isEmpty()) {
+ if (prefix != null) {
throw new SolrException(ErrorCode.BAD_REQUEST, FacetParams.FACET_PREFIX + " is not supported on numeric types");
}
if (termFilter != null) {
- final boolean supportedOperation = (termFilter instanceof SubstringBytesRefFilter) && ((SubstringBytesRefFilter) termFilter).substring().isEmpty();
- if (!supportedOperation) {
- throw new SolrException(ErrorCode.BAD_REQUEST, FacetParams.FACET_CONTAINS + " is not supported on numeric types");
- }
+ throw new SolrException(ErrorCode.BAD_REQUEST, "BytesRef term filters ("
+ + FacetParams.FACET_CONTAINS + ", "
+ + FacetParams.FACET_EXCLUDETERMS + ") are not supported on numeric types");
}
// We should do this, but mincount=0 is currently the default
// if (ft.isPointField() && mincount <= 0) {
[46/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-8045: Deploy V2 API
at /v2 instead of /solr/v2
Posted by is...@apache.org.
SOLR-8045: Deploy V2 API at /v2 instead of /solr/v2
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0fb386a8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0fb386a8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0fb386a8
Branch: refs/heads/jira/solr-6736
Commit: 0fb386a864ff5b7d32af3bef3f7eeca4d009acc1
Parents: 6415d91
Author: Cao Manh Dat <da...@apache.org>
Authored: Sat Mar 11 10:30:52 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Sat Mar 11 10:30:52 2017 +0700
----------------------------------------------------------------------
dev-tools/scripts/smokeTestRelease.py | 4 ++
solr/CHANGES.txt | 2 +
.../java/org/apache/solr/api/V2HttpCall.java | 2 +-
.../java/org/apache/solr/core/PluginBag.java | 2 +-
.../apache/solr/servlet/SolrDispatchFilter.java | 2 +-
.../conf/solrconfig-managed-schema.xml | 2 +-
.../org/apache/solr/cloud/rule/RulesTest.java | 2 +-
.../apache/solr/core/TestDynamicLoading.java | 2 +-
.../apache/solr/core/TestSolrConfigHandler.java | 14 ++---
.../apache/solr/handler/TestReqParamsAPI.java | 3 ++
.../solr/handler/V2ApiIntegrationTest.java | 4 +-
.../solr/rest/schema/TestBulkSchemaAPI.java | 2 +-
.../solr/security/BasicAuthIntegrationTest.java | 4 +-
solr/server/etc/jetty.xml | 54 +++++++++++---------
.../conf/solrconfig.xml | 2 +-
.../solr/client/solrj/impl/CloudSolrClient.java | 4 +-
.../solrj/embedded/SolrExampleJettyTest.java | 2 +-
17 files changed, 62 insertions(+), 45 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/dev-tools/scripts/smokeTestRelease.py
----------------------------------------------------------------------
diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py
index f9c3499..b0e76e9 100644
--- a/dev-tools/scripts/smokeTestRelease.py
+++ b/dev-tools/scripts/smokeTestRelease.py
@@ -855,6 +855,10 @@ def testSolrExample(unpackPath, javaPath, isSrc):
if s.find('<result name="response" numFound="3" start="0">') == -1:
print('FAILED: response is:\n%s' % s)
raise RuntimeError('query on solr example instance failed')
+ s = load('http://localhost:8983/v2/cores')
+ if s.find('"responseHeader":{"status":0,"QTime":1}') == -1:
+ print('FAILED: response is:\n%s' % s)
+ raise RuntimeError('query api v2 on solr example instance failed')
finally:
# Stop server:
print(' stop server using: bin/solr stop -p 8983')
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7285e4f..b164405 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -164,6 +164,8 @@ New Features
* SOLR-9986: Implement DatePointField (Cao Manh Dat, Tomás Fernández Löbbe)
+* SOLR-8045: Deploy V2 API at /v2 instead of /solr/v2 (Cao Manh Dat, Noble Paul)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
index c996b25..fb4aa56 100644
--- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
+++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
@@ -75,7 +75,7 @@ public class V2HttpCall extends HttpSolrCall {
protected void init() throws Exception {
String path = this.path;
- String fullPath = path = path.substring(3);//strip off '/v2'
+ String fullPath = path = path.substring(7);//strip off '/____v2'
try {
pieces = getPathSegments(path);
if (pieces.size() == 0) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/java/org/apache/solr/core/PluginBag.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index ad8bdec..65978f3 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -194,7 +194,7 @@ public class PluginBag<T> implements AutoCloseable {
String registerAt = plugin.pluginInfo.attributes.get("registerPath");
if (registerAt != null) {
List<String> strs = StrUtils.splitSmart(registerAt, ',');
- disableHandler = !strs.contains("/");
+ disableHandler = !strs.contains("/solr");
registerApi = strs.contains("/v2");
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index ce65069..ff0db9b 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -388,7 +388,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
path += request.getPathInfo();
}
- if (isV2Enabled && (path.startsWith("/v2/") || path.equals("/v2"))) {
+ if (isV2Enabled && (path.startsWith("/____v2/") || path.equals("/____v2"))) {
return new V2HttpCall(this, cores, request, response, false);
} else {
return new HttpSolrCall(this, cores, request, response, retry);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
index 31bbbb3..abd4fbe 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
@@ -40,7 +40,7 @@
</requestHandler>
- <requestHandler name="/dump" class="DumpRequestHandler" initParams="a" registerPath="/,/v2">
+ <requestHandler name="/dump" class="DumpRequestHandler" initParams="a" registerPath="/solr,/v2">
<lst name="defaults">
<str name="a">${my.custom.variable.a:A}</str>
<str name="b">${my.custom.variable.b:B}</str>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java b/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
index 13649e1..d4a72bf 100644
--- a/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
@@ -168,7 +168,7 @@ public class RulesTest extends SolrCloudTestCase {
public void testInvokeApi() throws Exception {
JettySolrRunner jetty = cluster.getRandomJetty(random());
try (SolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString())) {
- GenericSolrRequest req = new GenericSolrRequest(GET, "/v2/node/invoke", new ModifiableSolrParams()
+ GenericSolrRequest req = new GenericSolrRequest(GET, "/____v2/node/invoke", new ModifiableSolrParams()
.add("class", ImplicitSnitch.class.getName())
.add("cores", "1")
.add("freedisk", "1")
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
index 8479ae4..306b4b2 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
@@ -88,7 +88,7 @@ public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
payload = "{\n" +
- "'create-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandler' ,registerPath: '/,/v2', 'runtimeLib' : true }\n" +
+ "'create-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandler' ,registerPath: '/solr,/v2', 'runtimeLib' : true }\n" +
"}";
client = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 3f85a79..ec81c25 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -86,7 +86,7 @@ public class TestSolrConfigHandler extends RestTestBase {
"/solr", true, extraServlets);
if (random().nextBoolean()) {
log.info("These tests are run with V2 API");
- restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/v2/cores/" + DEFAULT_TEST_CORENAME);
+ restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME);
}
}
@@ -201,7 +201,7 @@ public class TestSolrConfigHandler extends RestTestBase {
10);
payload = "{\n" +
- "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' ,registerPath :'/,/v2', " +
+ "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' ,registerPath :'/solr,/v2', " +
" 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val', 'multival':['a','b','c']}}\n" +
"}";
runConfigCommand(writeHarness, "/config?wt=json", payload);
@@ -442,7 +442,7 @@ public class TestSolrConfigHandler extends RestTestBase {
payload = "{\n" +
" 'add-requesthandler': {\n" +
" name : '/dump100',\n" +
- " registerPath :'/,/v2',"+
+ " registerPath :'/solr,/v2',"+
" class : 'org.apache.solr.handler.DumpRequestHandler'," +
" suggester: [{name: s1,lookupImpl: FuzzyLookupFactory, dictionaryImpl : DocumentDictionaryFactory}," +
" {name: s2,lookupImpl: FuzzyLookupFactory , dictionaryImpl : DocumentExpressionDictionaryFactory}]" +
@@ -467,7 +467,7 @@ public class TestSolrConfigHandler extends RestTestBase {
payload = "{\n" +
"'add-requesthandler' : { 'name' : '/dump101', 'class': " +
"'" + CacheTest.class.getName() + "', " +
- " registerPath :'/,/v2'"+
+ " registerPath :'/solr,/v2'"+
", 'startup' : 'lazy'}\n" +
"}";
runConfigCommand(writeHarness, "/config?wt=json", payload);
@@ -589,7 +589,7 @@ public class TestSolrConfigHandler extends RestTestBase {
10);
payload = "{\n" +
- "'create-requesthandler' : { 'name' : '/d', registerPath :'/,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" +
+ "'create-requesthandler' : { 'name' : '/d', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" +
"}";
TestSolrConfigHandler.runConfigCommand(harness, "/config?wt=json", payload);
@@ -619,7 +619,7 @@ public class TestSolrConfigHandler extends RestTestBase {
5);
payload = "{\n" +
- "'create-requesthandler' : { 'name' : '/dump1', registerPath :'/,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" +
+ "'create-requesthandler' : { 'name' : '/dump1', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" +
"}";
TestSolrConfigHandler.runConfigCommand(harness, "/config?wt=json", payload);
@@ -794,7 +794,7 @@ public class TestSolrConfigHandler extends RestTestBase {
"org.apache.solr.handler.DumpRequestHandler",
10);
RESTfulServerProvider oldProvider = restTestHarness.getServerProvider();
- restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/v2/cores/" + DEFAULT_TEST_CORENAME);
+ restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME);
Map rsp = TestSolrConfigHandler.testForResponseElement(
harness,
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
index 3912011..de4a27a 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
@@ -50,6 +50,9 @@ public class TestReqParamsAPI extends SolrCloudTestCase {
private void setupHarnesses() {
for (final JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
RestTestHarness harness = new RestTestHarness(() -> jettySolrRunner.getBaseUrl().toString() + "/" + COLL_NAME);
+ if (true) {
+ harness.setServerProvider(() -> jettySolrRunner.getBaseUrl().toString() + "/____v2/c/" + COLL_NAME);
+ }
restTestHarnesses.add(harness);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java
index 1af5d93..4eb3de2 100644
--- a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java
@@ -86,10 +86,10 @@ public class V2ApiIntegrationTest extends SolrCloudTestCase {
private void testApis() throws Exception {
RestTestHarness restHarness = restTestHarnesses.get(0);
ServerProvider serverProvider = (ServerProvider) restHarness.getServerProvider();
- serverProvider.baseurl = serverProvider.jettySolrRunner.getBaseUrl()+"/v2/c/"+ COLL_NAME;
+ serverProvider.baseurl = serverProvider.jettySolrRunner.getBaseUrl()+"/____v2/c/"+ COLL_NAME;
Map result = TestSolrConfigHandler.getRespMap("/get/_introspect", restHarness);
assertEquals("/c/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]"));
- serverProvider.baseurl = serverProvider.jettySolrRunner.getBaseUrl()+"/v2/collections/"+ COLL_NAME;
+ serverProvider.baseurl = serverProvider.jettySolrRunner.getBaseUrl()+"/____v2/collections/"+ COLL_NAME;
result = TestSolrConfigHandler.getRespMap("/get/_introspect", restHarness);
assertEquals("/collections/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]"));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
index ea8fd7b..e2dc2bf 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
@@ -69,7 +69,7 @@ public class TestBulkSchemaAPI extends RestTestBase {
restTestHarness.setServerProvider(new RESTfulServerProvider() {
@Override
public String getBaseURL() {
- return jetty.getBaseUrl().toString() + "/v2/cores/" + DEFAULT_TEST_CORENAME;
+ return jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME;
}
});
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 397f4e8..5231dd8 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -86,8 +86,8 @@ public class BasicAuthIntegrationTest extends SolrCloudTestCase {
String authcPrefix = "/admin/authentication";
String authzPrefix = "/admin/authorization";
if(random().nextBoolean()){
- authcPrefix = "/v2/cluster/security/authentication";
- authzPrefix = "/v2/cluster/security/authorization";
+ authcPrefix = "/____v2/cluster/security/authentication";
+ authzPrefix = "/____v2/cluster/security/authorization";
}
NamedList<Object> rsp;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/server/etc/jetty.xml
----------------------------------------------------------------------
diff --git a/solr/server/etc/jetty.xml b/solr/server/etc/jetty.xml
index 8cb8223..b512c51 100644
--- a/solr/server/etc/jetty.xml
+++ b/solr/server/etc/jetty.xml
@@ -97,35 +97,43 @@
</New>
</Arg>
</Call>
+ <Call name="addRule">
+ <Arg>
+ <New class="org.eclipse.jetty.rewrite.handler.RewritePatternRule">
+ <Set name="pattern">/v2/*</Set>
+ <Set name="replacement">/solr/____v2</Set>
+ </New>
+ </Arg>
+ </Call>
+ <Set name="handler">
+ <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
+ <Set name="handlers">
+ <Array type="org.eclipse.jetty.server.Handler">
+ <Item>
+ <New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection"/>
+ </Item>
+ <Item>
+ <New id="InstrumentedHandler" class="com.codahale.metrics.jetty9.InstrumentedHandler">
+ <Arg><Ref refid="solrJettyMetricRegistry"/></Arg>
+ <Set name="handler">
+ <New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>
+ </Set>
+ </New>
+ </Item>
+ <Item>
+ <New id="RequestLog" class="org.eclipse.jetty.server.handler.RequestLogHandler"/>
+ </Item>
+ </Array>
+ </Set>
+ </New>
+ </Set>
</New>
<!-- =========================================================== -->
<!-- Set handler Collection Structure -->
<!-- =========================================================== -->
<Set name="handler">
- <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
- <Set name="handlers">
- <Array type="org.eclipse.jetty.server.Handler">
- <Item>
- <Ref id="RewriteHandler"/>
- </Item>
- <Item>
- <New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection"/>
- </Item>
- <Item>
- <New id="InstrumentedHandler" class="com.codahale.metrics.jetty9.InstrumentedHandler">
- <Arg><Ref refid="solrJettyMetricRegistry"/></Arg>
- <Set name="handler">
- <New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>
- </Set>
- </New>
- </Item>
- <Item>
- <New id="RequestLog" class="org.eclipse.jetty.server.handler.RequestLogHandler"/>
- </Item>
- </Array>
- </Set>
- </New>
+ <Ref id="RewriteHandler"/>
</Set>
<!-- =========================================================== -->
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
----------------------------------------------------------------------
diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
index a9ddb25..3ff89c0 100644
--- a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
+++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
@@ -850,7 +850,7 @@
</requestHandler>
<!-- A request handler that returns indented JSON by default -->
- <requestHandler name="/query" class="solr.SearchHandler" registerPath="/,/v2">
+ <requestHandler name="/query" class="solr.SearchHandler" registerPath="/solr,/v2">
<lst name="defaults">
<str name="echoParams">explicit</str>
<str name="wt">json</str>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 3147d4e..d3938c8 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -1047,8 +1047,8 @@ public class CloudSolrClient extends SolrClient {
CONFIGSETS_HANDLER_PATH,
AUTHC_PATH,
AUTHZ_PATH,
- "/v2/cluster/security/authentication",
- "/v2/cluster/security/authorization"
+ "/____v2/cluster/security/authentication",
+ "/____v2/cluster/security/authorization"
));
/**
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0fb386a8/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java
index b7ac7de..cb4ba50 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java
@@ -98,7 +98,7 @@ public class SolrExampleJettyTest extends SolrExampleTests {
private String getUri(HttpSolrClient client) {
String baseURL = client.getBaseURL();
return random().nextBoolean() ?
- baseURL.replace("/collection1", "/v2/cores/collection1/update") :
+ baseURL.replace("/collection1", "/____v2/cores/collection1/update") :
baseURL + "/update/json/docs";
}
}
[38/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-8876: change
morphline test config files to work around 'importCommands' bug when using
java9
Posted by is...@apache.org.
SOLR-8876: change morphline test config files to work around 'importCommands' bug when using java9
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8756be05
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8756be05
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8756be05
Branch: refs/heads/jira/solr-6736
Commit: 8756be05404758155b850748f807245fdaab6a8b
Parents: e35881a
Author: Chris Hostetter <ho...@apache.org>
Authored: Wed Mar 8 09:52:25 2017 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Wed Mar 8 09:52:46 2017 -0700
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../test/org/apache/solr/hadoop/MRUnitBase.java | 2 --
.../MapReduceIndexerToolArgumentParserTest.java | 1 -
.../solr/hadoop/MorphlineBasicMiniMRTest.java | 1 -
.../morphlines/cell/SolrCellMorphlineTest.java | 2 --
.../test-morphlines/loadSolrBasic.conf | 7 +++++-
.../test-morphlines/solrCellDocumentTypes.conf | 23 +++++++++++++++++++-
.../test-morphlines/solrCellJPGCompressed.conf | 17 ++++++++++++++-
.../test-files/test-morphlines/solrCellXML.conf | 11 +++++++++-
.../test-morphlines/tokenizeText.conf | 6 ++++-
.../tutorialReadAvroContainer.conf | 11 +++++++---
.../solr/AbstractSolrMorphlineTestBase.java | 2 --
.../solr/AbstractSolrMorphlineZkTestBase.java | 4 ----
13 files changed, 69 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0e78535..7285e4f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -310,6 +310,8 @@ Other Changes
correct now if other drivers were installed before test execution (e.g., through IDE).
(hossman, Uwe Schindler)
+* SOLR-8876: change morphline test config files to work around 'importCommands' bug when using java9 (hossman)
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
index 73323b9..558d662 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
@@ -23,7 +23,6 @@ import java.util.Locale;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.hadoop.morphline.MorphlineMapRunner;
import org.apache.solr.morphlines.solr.AbstractSolrMorphlineTestBase;
@@ -38,7 +37,6 @@ public abstract class MRUnitBase extends SolrTestCaseJ4 {
@BeforeClass
public static void setupClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
solrHomeZip = SolrOutputFormat.createSolrHomeZip(new File(RESOURCES_DIR + "/solr/mrunit"));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
index 5dfb5cc..1aebcf7 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
@@ -57,7 +57,6 @@ public class MapReduceIndexerToolArgumentParserTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("Does not work on Windows, because it uses UNIX shell commands or POSIX paths", Constants.WINDOWS);
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
index b32f112..6479a20 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
@@ -121,7 +121,6 @@ public class MorphlineBasicMiniMRTest extends SolrTestCaseJ4 {
assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
index 3607252..e0872b6 100644
--- a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
+++ b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
@@ -42,8 +42,6 @@ public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
@BeforeClass
public static void beforeClass2() {
assumeFalse("FIXME: Morphlines currently has issues with Windows paths", Constants.WINDOWS);
- assumeFalse("This test fails with Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)",
- Constants.JRE_IS_MINIMUM_JAVA9);
}
@Before
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
index 190d0e4..1c02a9a 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
@@ -39,7 +39,12 @@ SOLR_LOCATOR : ${?ENV_SOLR_LOCATOR}
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
index 7d232dd..4d38256 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
@@ -22,7 +22,28 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.SetValuesBuilder",
+ "org.kitesdk.morphline.stdlib.CallParentPipeBuilder",
+ "org.kitesdk.morphline.stdlib.GenerateUUIDBuilder",
+ "org.kitesdk.morphline.stdlib.JavaBuilder",
+ "org.kitesdk.morphline.stdlib.TryRulesBuilder",
+ "org.kitesdk.morphline.stdlib.SeparateAttachmentsBuilder",
+ "org.kitesdk.morphline.stdio.ReadCSVBuilder",
+ "org.kitesdk.morphline.avro.ReadAvroContainerBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroPathsBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroTreeBuilder",
+ "org.kitesdk.morphline.tika.DetectMimeTypeBuilder",
+ "org.kitesdk.morphline.tika.decompress.DecompressBuilder",
+ "org.kitesdk.morphline.tika.decompress.UnpackBuilder",
+ "org.kitesdk.morphline.twitter.ReadJsonTestTweetsBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{ separateAttachments {} }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
index 66e7d40..85cb2a7 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
@@ -25,7 +25,22 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.CallParentPipeBuilder",
+ "org.kitesdk.morphline.stdlib.GenerateUUIDBuilder",
+ "org.kitesdk.morphline.stdlib.JavaBuilder",
+ "org.kitesdk.morphline.stdlib.TryRulesBuilder",
+ "org.kitesdk.morphline.stdlib.SeparateAttachmentsBuilder",
+ "org.kitesdk.morphline.tika.DetectMimeTypeBuilder",
+ "org.kitesdk.morphline.tika.decompress.DecompressBuilder",
+ "org.kitesdk.morphline.tika.decompress.UnpackBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
+
commands : [
{ separateAttachments {} }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
index 43009bd..9e840a9 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
@@ -25,7 +25,16 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.AddValuesBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
+
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
index 9b62276..d9354c4 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
@@ -16,7 +16,11 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.apache.solr.morphlines.solr.TokenizeTextBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
index 0c00686..eee4ba5 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
@@ -42,9 +42,14 @@ morphlines : [
# morphline config file
id : morphline1
- # Import all morphline commands in these java packages and their subpackages.
- # Other commands that may be present on the classpath are not visible to this morphline.
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.avro.ReadAvroContainerBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroPathsBuilder",
+ "org.kitesdk.morphline.stdlib.ConvertTimestampBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
index 9a5791e..c91f31b 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
@@ -20,7 +20,6 @@ import com.codahale.metrics.MetricRegistry;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import org.apache.commons.io.FileUtils;
-import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
@@ -85,7 +84,6 @@ public class AbstractSolrMorphlineTestBase extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
// TODO: test doesn't work with some Locales, see SOLR-6458
savedLocale = Locale.getDefault();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
index 9aa27c4..31e7ebf 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
@@ -24,7 +24,6 @@ import java.util.Locale;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ListMultimap;
import com.typesafe.config.Config;
-import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
@@ -79,9 +78,6 @@ public abstract class AbstractSolrMorphlineZkTestBase extends SolrCloudTestCase
@BeforeClass
public static void setupClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)",
- Constants.JRE_IS_MINIMUM_JAVA9);
-
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
[20/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9999: Instrument
DirectUpdateHandler2.
Posted by is...@apache.org.
SOLR-9999: Instrument DirectUpdateHandler2.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/acb185b2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/acb185b2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/acb185b2
Branch: refs/heads/jira/solr-6736
Commit: acb185b2dc7522e6a4fa55d54e82910736668f8d
Parents: fb1d2d1
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Mar 6 11:09:59 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Mar 6 11:10:43 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../src/java/org/apache/solr/core/SolrCore.java | 3 +
.../solr/update/DirectUpdateHandler2.java | 128 +++++++++++++------
.../solr/update/DirectUpdateHandlerTest.java | 108 +++++++++++++---
4 files changed, 185 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/acb185b2/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0b05749..4cfcb72 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -151,6 +151,9 @@ New Features
* SOLR-10146: Added button to the Admin UI 'Collection' tab for deleting an inactive shard (Amrit Sarkar, janhoy)
+* SOLR-9999: Instrument DirectUpdateHandler2. This registers existing statistics under metrics API and adds
+ more metrics to track the rates of update and delete commands. (ab)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/acb185b2/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 1c30e4c..f22c472 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -1072,6 +1072,9 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
} else {
newUpdateHandler = createUpdateHandler(updateHandlerClass, updateHandler);
}
+ if (newUpdateHandler instanceof SolrMetricProducer) {
+ coreMetricManager.registerMetricProducer("updateHandler", (SolrMetricProducer)newUpdateHandler);
+ }
infoRegistry.put("updateHandler", newUpdateHandler);
return newUpdateHandler;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/acb185b2/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index ebff564..4592bcf 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -26,6 +26,8 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.LongAdder;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Meter;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CodecReader;
@@ -49,6 +51,8 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
@@ -71,24 +75,25 @@ import org.slf4j.LoggerFactory;
* <p>
* TODO: add soft commitWithin support
*/
-public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState.IndexWriterCloser {
+public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState.IndexWriterCloser, SolrMetricProducer {
protected final SolrCoreState solrCoreState;
// stats
LongAdder addCommands = new LongAdder();
- LongAdder addCommandsCumulative = new LongAdder();
+ Meter addCommandsCumulative;
LongAdder deleteByIdCommands= new LongAdder();
- LongAdder deleteByIdCommandsCumulative= new LongAdder();
- LongAdder deleteByQueryCommands= new LongAdder();
- LongAdder deleteByQueryCommandsCumulative= new LongAdder();
- LongAdder expungeDeleteCommands = new LongAdder();
- LongAdder mergeIndexesCommands = new LongAdder();
- LongAdder commitCommands= new LongAdder();
- LongAdder optimizeCommands= new LongAdder();
- LongAdder rollbackCommands= new LongAdder();
- LongAdder numDocsPending= new LongAdder();
+ Meter deleteByIdCommandsCumulative;
+ LongAdder deleteByQueryCommands = new LongAdder();
+ Meter deleteByQueryCommandsCumulative;
+ Meter expungeDeleteCommands;
+ Meter mergeIndexesCommands;
+ Meter commitCommands;
+ Meter splitCommands;
+ Meter optimizeCommands;
+ Meter rollbackCommands;
+ LongAdder numDocsPending = new LongAdder();
LongAdder numErrors = new LongAdder();
- LongAdder numErrorsCumulative = new LongAdder();
+ Meter numErrorsCumulative;
// tracks when auto-commit should occur
protected final CommitTracker commitTracker;
@@ -146,6 +151,35 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
}
}
+ @Override
+ public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+ commitCommands = manager.meter(registry, "commits", getCategory().toString(), scope);
+ Gauge<Integer> autoCommits = () -> commitTracker.getCommitCount();
+ manager.register(registry, autoCommits, true, "autoCommits", getCategory().toString(), scope);
+ Gauge<Integer> softAutoCommits = () -> softCommitTracker.getCommitCount();
+ manager.register(registry, softAutoCommits, true, "softAutoCommits", getCategory().toString(), scope);
+ optimizeCommands = manager.meter(registry, "optimizes", getCategory().toString(), scope);
+ rollbackCommands = manager.meter(registry, "rollbacks", getCategory().toString(), scope);
+ splitCommands = manager.meter(registry, "splits", getCategory().toString(), scope);
+ mergeIndexesCommands = manager.meter(registry, "merges", getCategory().toString(), scope);
+ expungeDeleteCommands = manager.meter(registry, "expungeDeletes", getCategory().toString(), scope);
+ Gauge<Long> docsPending = () -> numDocsPending.longValue();
+ manager.register(registry, docsPending, true, "docsPending", getCategory().toString(), scope);
+ Gauge<Long> adds = () -> addCommands.longValue();
+ manager.register(registry, adds, true, "adds", getCategory().toString(), scope);
+ Gauge<Long> deletesById = () -> deleteByIdCommands.longValue();
+ manager.register(registry, deletesById, true, "deletesById", getCategory().toString(), scope);
+ Gauge<Long> deletesByQuery = () -> deleteByQueryCommands.longValue();
+ manager.register(registry, deletesByQuery, true, "deletesByQuery", getCategory().toString(), scope);
+ Gauge<Long> errors = () -> numErrors.longValue();
+ manager.register(registry, errors, true, "errors", getCategory().toString(), scope);
+
+ addCommandsCumulative = manager.meter(registry, "cumulativeAdds", getCategory().toString(), scope);
+ deleteByIdCommandsCumulative = manager.meter(registry, "cumulativeDeletesById", getCategory().toString(), scope);
+ deleteByQueryCommandsCumulative = manager.meter(registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
+ numErrorsCumulative = manager.meter(registry, "cumulativeErrors", getCategory().toString(), scope);
+ }
+
private void deleteAll() throws IOException {
log.info(core.getLogId() + "REMOVING ALL DOCUMENTS FROM INDEX");
RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
@@ -192,7 +226,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
int rc = -1;
addCommands.increment();
- addCommandsCumulative.increment();
+ addCommandsCumulative.mark();
// if there is no ID field, don't overwrite
if (idField == null) {
@@ -230,7 +264,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
} finally {
if (rc != 1) {
numErrors.increment();
- numErrorsCumulative.increment();
+ numErrorsCumulative.mark();
} else {
numDocsPending.increment();
}
@@ -368,7 +402,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
@Override
public void delete(DeleteUpdateCommand cmd) throws IOException {
deleteByIdCommands.increment();
- deleteByIdCommandsCumulative.increment();
+ deleteByIdCommandsCumulative.mark();
Term deleteTerm = new Term(idField.getName(), cmd.getIndexedId());
// SolrCore.verbose("deleteDocuments",deleteTerm,writer);
@@ -426,7 +460,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
@Override
public void deleteByQuery(DeleteUpdateCommand cmd) throws IOException {
deleteByQueryCommands.increment();
- deleteByQueryCommandsCumulative.increment();
+ deleteByQueryCommandsCumulative.mark();
boolean madeIt=false;
try {
Query q = getQuery(cmd);
@@ -478,7 +512,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
} finally {
if (!madeIt) {
numErrors.increment();
- numErrorsCumulative.increment();
+ numErrorsCumulative.mark();
}
}
}
@@ -486,7 +520,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
@Override
public int mergeIndexes(MergeIndexesCommand cmd) throws IOException {
- mergeIndexesCommands.increment();
+ mergeIndexesCommands.mark();
int rc;
log.info("start " + cmd);
@@ -540,7 +574,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
error=false;
}
finally {
- if (error) numErrors.increment();
+ if (error) {
+ numErrors.increment();
+ numErrorsCumulative.mark();
+ }
}
}
@@ -552,10 +589,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
}
if (cmd.optimize) {
- optimizeCommands.increment();
+ optimizeCommands.mark();
} else {
- commitCommands.increment();
- if (cmd.expungeDeletes) expungeDeleteCommands.increment();
+ commitCommands.mark();
+ if (cmd.expungeDeletes) expungeDeleteCommands.mark();
}
Future[] waitSearcher = null;
@@ -674,7 +711,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
addCommands.reset();
deleteByIdCommands.reset();
deleteByQueryCommands.reset();
- if (error) numErrors.increment();
+ if (error) {
+ numErrors.increment();
+ numErrorsCumulative.mark();
+ }
}
// if we are supposed to wait for the searcher to be registered, then we should do it
@@ -702,7 +742,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
throw new UnsupportedOperationException("Rollback is currently not supported in SolrCloud mode. (SOLR-4895)");
}
- rollbackCommands.increment();
+ rollbackCommands.mark();
boolean error=true;
@@ -722,10 +762,13 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
error=false;
}
finally {
- addCommandsCumulative.add(-addCommands.sumThenReset());
- deleteByIdCommandsCumulative.add(-deleteByIdCommands.sumThenReset());
- deleteByQueryCommandsCumulative.add(-deleteByQueryCommands.sumThenReset());
- if (error) numErrors.increment();
+ addCommandsCumulative.mark(-addCommands.sumThenReset());
+ deleteByIdCommandsCumulative.mark(-deleteByIdCommands.sumThenReset());
+ deleteByQueryCommandsCumulative.mark(-deleteByQueryCommands.sumThenReset());
+ if (error) {
+ numErrors.increment();
+ numErrorsCumulative.mark();
+ }
}
}
@@ -834,7 +877,13 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
public void split(SplitIndexCommand cmd) throws IOException {
commit(new CommitUpdateCommand(cmd.req, false));
SolrIndexSplitter splitter = new SolrIndexSplitter(cmd);
- splitter.split();
+ splitCommands.mark();
+ try {
+ splitter.split();
+ } catch (IOException e) {
+ numErrors.increment();
+ numErrorsCumulative.mark();
+ }
}
/**
@@ -873,10 +922,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
}
private void updateDocument(AddUpdateCommand cmd, IndexWriter writer, Term updateTerm) throws IOException {
- if(cmd.isBlock()){
+ if (cmd.isBlock()) {
log.debug("updateDocuments({})", cmd);
writer.updateDocuments(updateTerm, cmd);
- }else{
+ } else {
Document luceneDocument = cmd.getLuceneDocument(false);
log.debug("updateDocument({})", cmd);
writer.updateDocument(updateTerm, luceneDocument);
@@ -916,7 +965,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
@Override
public NamedList getStatistics() {
NamedList lst = new SimpleOrderedMap();
- lst.add("commits", commitCommands.longValue());
+ lst.add("commits", commitCommands.getCount());
if (commitTracker.getDocsUpperBound() > 0) {
lst.add("autocommit maxDocs", commitTracker.getDocsUpperBound());
}
@@ -931,9 +980,9 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
lst.add("soft autocommit maxTime", "" + softCommitTracker.getTimeUpperBound() + "ms");
}
lst.add("soft autocommits", softCommitTracker.getCommitCount());
- lst.add("optimizes", optimizeCommands.longValue());
- lst.add("rollbacks", rollbackCommands.longValue());
- lst.add("expungeDeletes", expungeDeleteCommands.longValue());
+ lst.add("optimizes", optimizeCommands.getCount());
+ lst.add("rollbacks", rollbackCommands.getCount());
+ lst.add("expungeDeletes", expungeDeleteCommands.getCount());
lst.add("docsPending", numDocsPending.longValue());
// pset.size() not synchronized, but it should be fine to access.
// lst.add("deletesPending", pset.size());
@@ -941,10 +990,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
lst.add("deletesById", deleteByIdCommands.longValue());
lst.add("deletesByQuery", deleteByQueryCommands.longValue());
lst.add("errors", numErrors.longValue());
- lst.add("cumulative_adds", addCommandsCumulative.longValue());
- lst.add("cumulative_deletesById", deleteByIdCommandsCumulative.longValue());
- lst.add("cumulative_deletesByQuery", deleteByQueryCommandsCumulative.longValue());
- lst.add("cumulative_errors", numErrorsCumulative.longValue());
+ lst.add("cumulative_adds", addCommandsCumulative.getCount());
+ lst.add("cumulative_deletesById", deleteByIdCommandsCumulative.getCount());
+ lst.add("cumulative_deletesByQuery", deleteByQueryCommandsCumulative.getCount());
+ lst.add("cumulative_errors", numErrorsCumulative.getCount());
if (this.ulog != null) {
lst.add("transaction_logs_total_size", ulog.getTotalLogsSize());
lst.add("transaction_logs_total_number", ulog.getTotalLogsNumber());
@@ -971,4 +1020,5 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
public CommitTracker getSoftCommitTracker() {
return softCommitTracker;
}
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/acb185b2/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
index ca604fe..2816354 100644
--- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
@@ -23,6 +23,9 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.Metric;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
@@ -99,7 +102,29 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
@Test
public void testBasics() throws Exception {
-
+
+ // get initial metrics
+ Map<String, Metric> metrics = h.getCoreContainer().getMetricManager()
+ .registry(h.getCore().getCoreMetricManager().getRegistryName()).getMetrics();
+
+ String PREFIX = "UPDATE.updateHandler.";
+
+ String commitsName = PREFIX + "commits";
+ assertTrue(metrics.containsKey(commitsName));
+ String addsName = PREFIX + "adds";
+ assertTrue(metrics.containsKey(addsName));
+ String cumulativeAddsName = PREFIX + "cumulativeAdds";
+ String delsIName = PREFIX + "deletesById";
+ String cumulativeDelsIName = PREFIX + "cumulativeDeletesById";
+ String delsQName = PREFIX + "deletesByQuery";
+ String cumulativeDelsQName = PREFIX + "cumulativeDeletesByQuery";
+ long commits = ((Meter) metrics.get(commitsName)).getCount();
+ long adds = ((Gauge<Long>) metrics.get(addsName)).getValue();
+ long cumulativeAdds = ((Meter) metrics.get(cumulativeAddsName)).getCount();
+ long cumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount();
+ long cumulativeDelsQ = ((Meter) metrics.get(cumulativeDelsQName)).getCount();
+
+
assertNull("This test requires a schema that has no version field, " +
"it appears the schema file in use has been edited to violate " +
"this requirement",
@@ -112,8 +137,23 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
assertQ(req("q","id:5"), "//*[@numFound='0']");
assertQ(req("q","id:6"), "//*[@numFound='0']");
+ long newAdds = ((Gauge<Long>) metrics.get(addsName)).getValue();
+ long newCumulativeAdds = ((Meter) metrics.get(cumulativeAddsName)).getCount();
+ assertEquals("new adds", 2, newAdds - adds);
+ assertEquals("new cumulative adds", 2, newCumulativeAdds - cumulativeAdds);
+
assertU(commit());
+ long newCommits = ((Meter) metrics.get(commitsName)).getCount();
+ assertEquals("new commits", 1, newCommits - commits);
+
+ newAdds = ((Gauge<Long>) metrics.get(addsName)).getValue();
+ newCumulativeAdds = ((Meter) metrics.get(cumulativeAddsName)).getCount();
+ // adds should be reset to 0 after commit
+ assertEquals("new adds after commit", 0, newAdds);
+ // not so with cumulative ones!
+ assertEquals("new cumulative adds after commit", 2, newCumulativeAdds - cumulativeAdds);
+
// now they should be there
assertQ(req("q","id:5"), "//*[@numFound='1']");
assertQ(req("q","id:6"), "//*[@numFound='1']");
@@ -121,11 +161,21 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
// now delete one
assertU(delI("5"));
+ long newDelsI = ((Gauge<Long>) metrics.get(delsIName)).getValue();
+ long newCumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount();
+ assertEquals("new delsI", 1, newDelsI);
+ assertEquals("new cumulative delsI", 1, newCumulativeDelsI - cumulativeDelsI);
+
// not committed yet
assertQ(req("q","id:5"), "//*[@numFound='1']");
assertU(commit());
-
+ // delsI should be reset to 0 after commit
+ newDelsI = ((Gauge<Long>) metrics.get(delsIName)).getValue();
+ newCumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount();
+ assertEquals("new delsI after commit", 0, newDelsI);
+ assertEquals("new cumulative delsI after commit", 1, newCumulativeDelsI - cumulativeDelsI);
+
// 5 should be gone
assertQ(req("q","id:5"), "//*[@numFound='0']");
assertQ(req("q","id:6"), "//*[@numFound='1']");
@@ -133,14 +183,36 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
// now delete all
assertU(delQ("*:*"));
+ long newDelsQ = ((Gauge<Long>) metrics.get(delsQName)).getValue();
+ long newCumulativeDelsQ = ((Meter) metrics.get(cumulativeDelsQName)).getCount();
+ assertEquals("new delsQ", 1, newDelsQ);
+ assertEquals("new cumulative delsQ", 1, newCumulativeDelsQ - cumulativeDelsQ);
+
// not committed yet
assertQ(req("q","id:6"), "//*[@numFound='1']");
assertU(commit());
+ newDelsQ = ((Gauge<Long>) metrics.get(delsQName)).getValue();
+ newCumulativeDelsQ = ((Meter) metrics.get(cumulativeDelsQName)).getCount();
+ assertEquals("new delsQ after commit", 0, newDelsQ);
+ assertEquals("new cumulative delsQ after commit", 1, newCumulativeDelsQ - cumulativeDelsQ);
+
// 6 should be gone
assertQ(req("q","id:6"), "//*[@numFound='0']");
+ // verify final metrics
+ newCommits = ((Meter) metrics.get(commitsName)).getCount();
+ assertEquals("new commits", 3, newCommits - commits);
+ newAdds = ((Gauge<Long>) metrics.get(addsName)).getValue();
+ assertEquals("new adds", 0, newAdds);
+ newCumulativeAdds = ((Meter) metrics.get(cumulativeAddsName)).getCount();
+ assertEquals("new cumulative adds", 2, newCumulativeAdds - cumulativeAdds);
+ newDelsI = ((Gauge<Long>) metrics.get(delsIName)).getValue();
+ assertEquals("new delsI", 0, newDelsI);
+ newCumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount();
+ assertEquals("new cumulative delsI", 1, newCumulativeDelsI - cumulativeDelsI);
+
}
@@ -161,12 +233,12 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false);
cmtCmd.waitSearcher = true;
assertEquals( 1, duh2.addCommands.longValue() );
- assertEquals( 1, duh2.addCommandsCumulative.longValue() );
- assertEquals( 0, duh2.commitCommands.longValue() );
+ assertEquals( 1, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 0, duh2.commitCommands.getCount() );
updater.commit(cmtCmd);
assertEquals( 0, duh2.addCommands.longValue() );
- assertEquals( 1, duh2.addCommandsCumulative.longValue() );
- assertEquals( 1, duh2.commitCommands.longValue() );
+ assertEquals( 1, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 1, duh2.commitCommands.getCount() );
ureq.close();
assertU(adoc("id","B"));
@@ -175,12 +247,12 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
ureq = req();
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq);
assertEquals( 1, duh2.addCommands.longValue() );
- assertEquals( 2, duh2.addCommandsCumulative.longValue() );
- assertEquals( 0, duh2.rollbackCommands.longValue() );
+ assertEquals( 2, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 0, duh2.rollbackCommands.getCount() );
updater.rollback(rbkCmd);
assertEquals( 0, duh2.addCommands.longValue() );
- assertEquals( 1, duh2.addCommandsCumulative.longValue() );
- assertEquals( 1, duh2.rollbackCommands.longValue() );
+ assertEquals( 1, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 1, duh2.rollbackCommands.getCount() );
ureq.close();
// search - "B" should not be found.
@@ -221,12 +293,12 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false);
cmtCmd.waitSearcher = true;
assertEquals( 2, duh2.addCommands.longValue() );
- assertEquals( 2, duh2.addCommandsCumulative.longValue() );
- assertEquals( 0, duh2.commitCommands.longValue() );
+ assertEquals( 2, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 0, duh2.commitCommands.getCount() );
updater.commit(cmtCmd);
assertEquals( 0, duh2.addCommands.longValue() );
- assertEquals( 2, duh2.addCommandsCumulative.longValue() );
- assertEquals( 1, duh2.commitCommands.longValue() );
+ assertEquals( 2, duh2.addCommandsCumulative.getCount() );
+ assertEquals( 1, duh2.commitCommands.getCount() );
ureq.close();
// search - "A","B" should be found.
@@ -254,13 +326,13 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
ureq = req();
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq);
assertEquals( 1, duh2.deleteByIdCommands.longValue() );
- assertEquals( 1, duh2.deleteByIdCommandsCumulative.longValue() );
- assertEquals( 0, duh2.rollbackCommands.longValue() );
+ assertEquals( 1, duh2.deleteByIdCommandsCumulative.getCount() );
+ assertEquals( 0, duh2.rollbackCommands.getCount() );
updater.rollback(rbkCmd);
ureq.close();
assertEquals( 0, duh2.deleteByIdCommands.longValue() );
- assertEquals( 0, duh2.deleteByIdCommandsCumulative.longValue() );
- assertEquals( 1, duh2.rollbackCommands.longValue() );
+ assertEquals( 0, duh2.deleteByIdCommandsCumulative.getCount() );
+ assertEquals( 1, duh2.rollbackCommands.getCount() );
// search - "B" should be found.
assertQ("\"B\" should be found.", req
[35/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7695: support
synonyms in ComplexPhraseQueryParser
Posted by is...@apache.org.
LUCENE-7695: support synonyms in ComplexPhraseQueryParser
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8a549293
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8a549293
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8a549293
Branch: refs/heads/jira/solr-6736
Commit: 8a5492930eff393de824450f77f27d98a204df3d
Parents: d844207
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Sun Mar 5 12:24:47 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Wed Mar 8 11:20:35 2017 +0300
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 ++
.../complexPhrase/ComplexPhraseQueryParser.java | 21 +++++++++---
.../complexPhrase/TestComplexPhraseQuery.java | 36 +++++++++++++++++---
3 files changed, 52 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index b067fde..a8f7ee4 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -204,6 +204,9 @@ Improvements
IndexInput description instead of plain IOException (Mike Drob via
Mike McCandless)
+* LUCENE-7695: ComplexPhraseQueryParser to support query time synonyms (Markus Jelsma
+ via Mikhail Khludnev)
+
Optimizations
* LUCENE-7641: Optimized point range queries to compute documents that do not
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
index 6e18960..32f4fb3 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
@@ -28,6 +28,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
@@ -35,6 +36,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
@@ -257,6 +259,7 @@ public class ComplexPhraseQueryParser extends QueryParser {
// ArrayList spanClauses = new ArrayList();
if (contents instanceof TermQuery
|| contents instanceof MultiTermQuery
+ || contents instanceof SynonymQuery
) {
return contents;
}
@@ -287,9 +290,11 @@ public class ComplexPhraseQueryParser extends QueryParser {
qc = ((BoostQuery) qc).getQuery();
}
- if (qc instanceof BooleanQuery) {
+ if (qc instanceof BooleanQuery || qc instanceof SynonymQuery) {
ArrayList<SpanQuery> sc = new ArrayList<>();
- addComplexPhraseClause(sc, (BooleanQuery) qc);
 + BooleanQuery booleanClause = qc instanceof BooleanQuery ?
 + (BooleanQuery) qc : convert((SynonymQuery) qc);
 + addComplexPhraseClause(sc, booleanClause);
if (sc.size() > 0) {
allSpanClauses[i] = sc.get(0);
} else {
@@ -309,14 +314,14 @@ public class ComplexPhraseQueryParser extends QueryParser {
if (qc instanceof TermQuery) {
TermQuery tq = (TermQuery) qc;
allSpanClauses[i] = new SpanTermQuery(tq.getTerm());
- } else {
+ } else {
throw new IllegalArgumentException("Unknown query type \""
+ qc.getClass().getName()
+ "\" found in phrase query string \""
+ phrasedQueryStringContents + "\"");
}
-
}
+
i += 1;
}
if (numNegatives == 0) {
@@ -354,6 +359,14 @@ public class ComplexPhraseQueryParser extends QueryParser {
return snot;
}
+ private BooleanQuery convert(SynonymQuery qc) {
+ BooleanQuery.Builder bqb = new BooleanQuery.Builder();
+ for (Term t : qc.getTerms()){
+ bqb.add(new BooleanClause(new TermQuery(t), Occur.SHOULD));
+ }
+ return bqb.build();
+ }
+
private void addComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc) {
ArrayList<SpanQuery> ors = new ArrayList<>();
ArrayList<SpanQuery> nots = new ArrayList<>();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
index 28b600b..5c45e28 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
@@ -20,6 +20,7 @@ import java.util.HashSet;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
@@ -39,7 +40,11 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
new DocData("john smith", "1", "developer"),
new DocData("johathon smith", "2", "developer"),
new DocData("john percival smith", "3", "designer"),
- new DocData("jackson waits tom", "4", "project manager")
+ new DocData("jackson waits tom", "4", "project manager"),
+ new DocData("johny perkins", "5", "orders pizza"),
+ new DocData("hapax neverson", "6", "never matches"),
+ new DocData("dog cigar", "7", "just for synonyms"),
+ new DocData("dogs don't smoke cigarettes", "8", "just for synonyms"),
};
private IndexSearcher searcher;
@@ -73,12 +78,30 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
}
public void testSingleTermPhrase() throws Exception {
- checkMatches("\"joh*\" \"tom\"", "1,2,3,4");
+ checkMatches("\"joh*\"","1,2,3,5");
+ checkMatches("\"joh~\"","1,3,5");
+ checkMatches("\"joh*\" \"tom\"", "1,2,3,4,5");
checkMatches("+\"j*\" +\"tom\"", "4");
- checkMatches("\"jo*\" \"[sma TO smZ]\" ", "1,2,3");
+ checkMatches("\"jo*\" \"[sma TO smZ]\" ", "1,2,3,5,8");
checkMatches("+\"j*hn\" +\"sm*h\"", "1,3");
}
+ public void testSynonyms() throws Exception {
+ checkMatches("\"dogs\"","8");
+ MockSynonymAnalyzer synonym = new MockSynonymAnalyzer();
+ checkMatches("\"dogs\"","7,8",synonym);
+ // synonym is unidirectional
+ checkMatches("\"dog\"","7",synonym);
+ checkMatches("\"dogs cigar*\"","");
+ checkMatches("\"dog cigar*\"","7");
+ checkMatches("\"dogs cigar*\"","7", synonym);
+ checkMatches("\"dog cigar*\"","7", synonym);
+ checkMatches("\"dogs cigar*\"~2","7,8", synonym);
+ // synonym is unidirectional
+ checkMatches("\"dog cigar*\"~2","7", synonym);
+
+ }
+
public void testUnOrderedProximitySearches() throws Exception {
inOrder = true;
@@ -98,8 +121,13 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
}
private void checkMatches(String qString, String expectedVals)
+ throws Exception {
+ checkMatches(qString, expectedVals, analyzer);
+ }
+
+ private void checkMatches(String qString, String expectedVals, Analyzer anAnalyzer)
throws Exception {
- ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(defaultFieldName, analyzer);
+ ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(defaultFieldName, anAnalyzer);
qp.setInOrder(inOrder);
qp.setFuzzyPrefixLength(1); // usually a good idea
[04/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-6819: Remove
index-time boosts.
Posted by is...@apache.org.
LUCENE-6819: Remove index-time boosts.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8ed2b764
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8ed2b764
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8ed2b764
Branch: refs/heads/jira/solr-6736
Commit: 8ed2b764ed4d4d5203b5df1e16fdc1ffd640322c
Parents: 707d7b9
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Mar 2 19:18:16 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Mar 2 19:18:16 2017 +0100
----------------------------------------------------------------------
lucene/CHANGES.txt | 4 +
lucene/MIGRATE.txt | 6 +
.../apache/lucene/legacy/TestLegacyField.java | 10 -
.../lucene/codecs/StoredFieldsWriter.java | 5 -
.../java/org/apache/lucene/document/Field.java | 32 --
.../lucene/index/DefaultIndexingChain.java | 8 -
.../apache/lucene/index/FieldInvertState.java | 20 +-
.../org/apache/lucene/index/IndexableField.java | 24 --
.../index/SortingStoredFieldsConsumer.java | 5 -
.../org/apache/lucene/search/package-info.java | 23 +-
.../search/similarities/BM25Similarity.java | 12 +-
.../search/similarities/ClassicSimilarity.java | 5 +-
.../search/similarities/SimilarityBase.java | 8 +-
.../search/similarities/TFIDFSimilarity.java | 46 +--
.../org/apache/lucene/document/TestField.java | 30 --
.../apache/lucene/index/TestCustomNorms.java | 14 +-
.../org/apache/lucene/index/TestFieldReuse.java | 5 -
.../apache/lucene/index/TestIndexSorting.java | 8 +-
.../lucene/index/TestIndexWriterExceptions.java | 66 ----
.../apache/lucene/index/TestIndexableField.java | 10 -
.../test/org/apache/lucene/index/TestNorms.java | 13 +-
.../org/apache/lucene/index/TestOmitTf.java | 2 +-
.../lucene/search/TestDisjunctionMaxQuery.java | 2 +-
.../org/apache/lucene/search/TestDocBoost.java | 98 ------
.../lucene/search/TestMatchAllDocsQuery.java | 9 +-
.../apache/lucene/search/TestSimilarity.java | 2 +-
.../similarities/TestBooleanSimilarity.java | 3 +-
.../search/similarities/TestSimilarityBase.java | 1 -
.../apache/lucene/index/memory/MemoryIndex.java | 70 +---
.../lucene/index/memory/TestMemoryIndex.java | 8 +-
.../memory/TestMemoryIndexAgainstRAMDir.java | 5 +-
.../apache/lucene/document/LazyDocument.java | 5 -
.../apache/lucene/misc/SweetSpotSimilarity.java | 2 +-
.../lucene/misc/SweetSpotSimilarityTest.java | 8 -
.../function/TestLongNormValueSource.java | 3 +-
.../queries/payloads/TestPayloadScoreQuery.java | 2 +-
.../queries/payloads/TestPayloadTermQuery.java | 2 +-
.../lucene/search/BaseExplanationTestCase.java | 1 -
solr/CHANGES.txt | 5 +
.../apache/solr/schema/ICUCollationField.java | 6 +-
.../solr/handler/dataimport/DocBuilder.java | 24 +-
.../handler/dataimport/config/EntityField.java | 6 -
.../handler/extraction/ExtractingParams.java | 13 -
.../handler/extraction/SolrContentHandler.java | 17 +-
.../LanguageIdentifierUpdateProcessor.java | 2 +-
.../solr/morphlines/solr/LoadSolrBuilder.java | 32 +-
.../solr/uima/processor/UIMAToSolrMapper.java | 2 +-
.../handler/DocumentAnalysisRequestHandler.java | 4 +-
.../org/apache/solr/handler/ExportWriter.java | 2 +-
.../solr/handler/admin/LukeRequestHandler.java | 1 -
.../solr/handler/loader/CSVLoaderBase.java | 4 +-
.../apache/solr/handler/loader/JsonLoader.java | 29 +-
.../apache/solr/handler/loader/XMLLoader.java | 23 +-
.../solr/schema/AbstractSpatialFieldType.java | 6 +-
.../java/org/apache/solr/schema/BBoxField.java | 2 +-
.../org/apache/solr/schema/BinaryField.java | 7 +-
.../java/org/apache/solr/schema/BoolField.java | 4 +-
.../org/apache/solr/schema/CollationField.java | 6 +-
.../org/apache/solr/schema/CurrencyField.java | 8 +-
.../org/apache/solr/schema/DateRangeField.java | 4 +-
.../apache/solr/schema/DoublePointField.java | 5 +-
.../java/org/apache/solr/schema/EnumField.java | 14 +-
.../java/org/apache/solr/schema/FieldType.java | 26 +-
.../org/apache/solr/schema/FloatPointField.java | 5 +-
.../org/apache/solr/schema/IntPointField.java | 5 +-
.../java/org/apache/solr/schema/LatLonType.java | 10 +-
.../org/apache/solr/schema/LongPointField.java | 5 +-
.../java/org/apache/solr/schema/PointField.java | 6 +-
.../java/org/apache/solr/schema/PointType.java | 8 +-
.../apache/solr/schema/PreAnalyzedField.java | 10 +-
.../org/apache/solr/schema/SchemaField.java | 10 +-
.../java/org/apache/solr/schema/StrField.java | 4 +-
.../java/org/apache/solr/schema/TrieField.java | 9 +-
.../java/org/apache/solr/search/Grouping.java | 2 +-
.../GroupedEndResultTransformer.java | 2 +-
.../org/apache/solr/update/DocumentBuilder.java | 41 +--
...aluesOrNoneFieldMutatingUpdateProcessor.java | 3 +-
.../processor/AtomicUpdateDocumentMerger.java | 8 +-
.../CloneFieldUpdateProcessorFactory.java | 3 +-
.../ConcatFieldUpdateProcessorFactory.java | 3 +-
.../CountFieldValuesUpdateProcessorFactory.java | 3 +-
.../FieldValueMutatingUpdateProcessor.java | 3 +-
.../FieldValueSubsetUpdateProcessorFactory.java | 3 +-
.../PreAnalyzedUpdateProcessorFactory.java | 9 +-
.../org/apache/solr/BasicFunctionalityTest.java | 12 +-
.../org/apache/solr/TestDocumentBuilder.java | 1 -
.../solr/cloud/TestCloudDeleteByQuery.java | 2 +-
.../TestStressCloudBlindAtomicUpdates.java | 6 +-
.../cloud/TestTolerantUpdateProcessorCloud.java | 2 +-
.../org/apache/solr/handler/JsonLoaderTest.java | 5 +-
.../handler/XmlUpdateRequestHandlerTest.java | 7 -
.../solr/schema/AbstractCurrencyFieldTest.java | 2 +-
.../org/apache/solr/schema/DateFieldTest.java | 6 +-
.../org/apache/solr/schema/DocValuesTest.java | 2 +-
.../org/apache/solr/schema/PolyFieldTest.java | 8 +-
.../solr/schema/PreAnalyzedFieldTest.java | 10 +-
.../apache/solr/schema/SortableBinaryField.java | 6 +-
.../schema/TestSchemalessBufferedUpdates.java | 2 +-
.../solr/search/function/TestFunctionQuery.java | 1 -
.../apache/solr/update/DocumentBuilderTest.java | 317 +------------------
.../CloneFieldUpdateProcessorFactoryTest.java | 14 +-
.../DefaultValueUpdateProcessorTest.java | 7 +-
.../FieldMutatingUpdateProcessorTest.java | 14 +-
.../processor/TolerantUpdateProcessorTest.java | 16 +-
.../UUIDUpdateProcessorFallbackTest.java | 3 +-
.../processor/UpdateProcessorTestBase.java | 7 +-
.../solrj/beans/DocumentObjectBinder.java | 4 +-
.../request/JavaBinUpdateRequestCodec.java | 29 +-
.../solr/client/solrj/util/ClientUtils.java | 32 +-
.../apache/solr/common/SolrInputDocument.java | 53 +---
.../org/apache/solr/common/SolrInputField.java | 28 +-
.../apache/solr/common/util/JavaBinCodec.java | 34 +-
.../solr/client/solrj/SolrExampleTests.java | 76 ++---
.../solr/client/solrj/SolrExampleTestsBase.java | 14 +-
.../solrj/request/TestUpdateRequestCodec.java | 20 +-
.../apache/solr/common/SolrDocumentTest.java | 6 +-
.../java/org/apache/solr/SolrTestCaseJ4.java | 7 -
117 files changed, 438 insertions(+), 1296 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 5e468bf..a339760 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -37,6 +37,10 @@ API Changes
to be less trappy. See IndexReader.getReaderCacheHelper and
LeafReader.getCoreCacheHelper. (Adrien Grand)
+* LUCENE-6819: Index-time boosts are not supported anymore. As a replacement,
+ index-time scoring factors should be indexed into a doc value field and
+ combined at query time using eg. FunctionScoreQuery. (Adrien Grand)
+
Bug Fixes
* LUCENE-7626: IndexWriter will no longer accept broken token offsets
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/MIGRATE.txt
----------------------------------------------------------------------
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index 51f6435..1ccf56f 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -55,3 +55,9 @@ order to be less trappy. You should now use IndexReader.getReaderCacheHelper()
to have manage caches that take deleted docs and doc values updates into
account, and LeafReader.getCoreCacheHelper() to manage per-segment caches that
do not take deleted docs and doc values updates into account.
+
+## Index-time boosts removal (LUCENE-6819)
+
+Index-time boosts are not supported anymore. As a replacement, index-time
+scoring factors should be indexed in a doc value field and combined with the
+score at query time using FunctionScoreQuery for instance.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
index 65ff096..92d1dd6 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
@@ -33,7 +33,6 @@ public class TestLegacyField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -57,7 +56,6 @@ public class TestLegacyField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -81,7 +79,6 @@ public class TestLegacyField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -105,7 +102,6 @@ public class TestLegacyField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -187,10 +183,4 @@ public class TestLegacyField extends LuceneTestCase {
f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
});
}
-
- private void trySetBoost(Field f) {
- expectThrows(IllegalArgumentException.class, () -> {
- f.setBoost(5.0f);
- });
- }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
index 0540f4f..39ade42 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
@@ -249,11 +249,6 @@ public abstract class StoredFieldsWriter implements Closeable {
public Reader readerValue() {
return null;
}
-
- @Override
- public float boost() {
- return 1F;
- }
@Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/document/Field.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/Field.java b/lucene/core/src/java/org/apache/lucene/document/Field.java
index fa3f5a7..cbb559a 100644
--- a/lucene/core/src/java/org/apache/lucene/document/Field.java
+++ b/lucene/core/src/java/org/apache/lucene/document/Field.java
@@ -81,12 +81,6 @@ public class Field implements IndexableField {
protected TokenStream tokenStream;
/**
- * Field's boost
- * @see #boost()
- */
- protected float boost = 1.0f;
-
- /**
* Expert: creates a field with no initial value.
* Intended only for custom Field subclasses.
* @param name field name
@@ -432,32 +426,6 @@ public class Field implements IndexableField {
public String name() {
return name;
}
-
- /**
- * {@inheritDoc}
- * <p>
- * The default value is <code>1.0f</code> (no boost).
- * @see #setBoost(float)
- */
- @Override
- public float boost() {
- return boost;
- }
-
- /**
- * Sets the boost factor on this field.
- * @throws IllegalArgumentException if this field is not indexed,
- * or if it omits norms.
- * @see #boost()
- */
- public void setBoost(float boost) {
- if (boost != 1.0f) {
- if (type.indexOptions() == IndexOptions.NONE || type.omitNorms()) {
- throw new IllegalArgumentException("You cannot set an index-time boost on an unindexed field, or one that omits norms");
- }
- }
- this.boost = boost;
- }
@Override
public Number numericValue() {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
index b118c13..ba65629 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
@@ -426,12 +426,6 @@ final class DefaultIndexingChain extends DocConsumer {
// Invert indexed fields:
if (fieldType.indexOptions() != IndexOptions.NONE) {
-
- // if the field omits norms, the boost cannot be indexed.
- if (fieldType.omitNorms() && field.boost() != 1.0f) {
- throw new UnsupportedOperationException("You cannot set an index-time boost: norms are omitted for field '" + field.name() + "'");
- }
-
fp = getOrAddField(fieldName, fieldType, true);
boolean first = fp.fieldGen != fieldGen;
fp.invert(field, first);
@@ -823,8 +817,6 @@ final class DefaultIndexingChain extends DocConsumer {
invertState.position += docState.analyzer.getPositionIncrementGap(fieldInfo.name);
invertState.offset += docState.analyzer.getOffsetGap(fieldInfo.name);
}
-
- invertState.boost *= field.boost();
}
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/index/FieldInvertState.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FieldInvertState.java b/lucene/core/src/java/org/apache/lucene/index/FieldInvertState.java
index f7eef96..1da02b2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FieldInvertState.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FieldInvertState.java
@@ -38,7 +38,6 @@ public final class FieldInvertState {
int offset;
int maxTermFrequency;
int uniqueTermCount;
- float boost;
// we must track these across field instances (multi-valued case)
int lastStartOffset = 0;
int lastPosition = 0;
@@ -57,13 +56,12 @@ public final class FieldInvertState {
/** Creates {code FieldInvertState} for the specified
* field name and values for all fields. */
- public FieldInvertState(String name, int position, int length, int numOverlap, int offset, float boost) {
+ public FieldInvertState(String name, int position, int length, int numOverlap, int offset) {
this.name = name;
this.position = position;
this.length = length;
this.numOverlap = numOverlap;
this.offset = offset;
- this.boost = boost;
}
/**
@@ -76,7 +74,6 @@ public final class FieldInvertState {
offset = 0;
maxTermFrequency = 0;
uniqueTermCount = 0;
- boost = 1.0f;
lastStartOffset = 0;
lastPosition = 0;
}
@@ -139,21 +136,6 @@ public final class FieldInvertState {
}
/**
- * Get boost value. This is the cumulative product of
- * document boost and field boost for all field instances
- * sharing the same field name.
- * @return the boost
- */
- public float getBoost() {
- return boost;
- }
-
- /** Set boost value. */
- public void setBoost(float boost) {
- this.boost = boost;
- }
-
- /**
* Get the maximum term-frequency encountered for any term in the field. A
* field containing "the quick brown fox jumps over the lazy dog" would have
* a value of 2, because "the" appears twice.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/index/IndexableField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexableField.java b/lucene/core/src/java/org/apache/lucene/index/IndexableField.java
index f6fc615..f08eab5 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexableField.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexableField.java
@@ -21,8 +21,6 @@ import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
-import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.BytesRef;
// TODO: how to handle versioning here...?
@@ -57,28 +55,6 @@ public interface IndexableField {
*/
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse);
- /**
- * Returns the field's index-time boost.
- * <p>
- * Only fields can have an index-time boost, if you want to simulate
- * a "document boost", then you must pre-multiply it across all the
- * relevant fields yourself.
- * <p>The boost is used to compute the norm factor for the field. By
- * default, in the {@link Similarity#computeNorm(FieldInvertState)} method,
- * the boost value is multiplied by the length normalization factor and then
- * rounded by {@link ClassicSimilarity#encodeNormValue(float)} before it is stored in the
- * index. One should attempt to ensure that this product does not overflow
- * the range of that encoding.
- * <p>
- * It is illegal to return a boost other than 1.0f for a field that is not
- * indexed ({@link IndexableFieldType#indexOptions()} is IndexOptions.NONE) or
- * omits normalization values ({@link IndexableFieldType#omitNorms()} returns true).
- *
- * @see Similarity#computeNorm(FieldInvertState)
- * @see ClassicSimilarity#encodeNormValue(float)
- */
- public float boost();
-
/** Non-null if this field has a binary value */
public BytesRef binaryValue();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
index b3cc1f4..e5443b2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingStoredFieldsConsumer.java
@@ -183,11 +183,6 @@ final class SortingStoredFieldsConsumer extends StoredFieldsConsumer {
}
@Override
- public float boost() {
- return 1F;
- }
-
- @Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
return null;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/search/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/package-info.java b/lucene/core/src/java/org/apache/lucene/search/package-info.java
index fc0ab2e..c9d8e26 100644
--- a/lucene/core/src/java/org/apache/lucene/search/package-info.java
+++ b/lucene/core/src/java/org/apache/lucene/search/package-info.java
@@ -269,27 +269,8 @@
* Fields and the other in one Field may return different scores for the same query due to length
* normalization.
* <h3>Score Boosting</h3>
- * <p>Lucene allows influencing search results by "boosting" at different times:
- * <ul>
- * <li><b>Index-time boost</b> by calling
- * {@link org.apache.lucene.document.Field#setBoost(float) Field.setBoost()} before a document is
- * added to the index.</li>
- * <li><b>Query-time boost</b> by applying a boost to a query by wrapping with
- * {@link org.apache.lucene.search.BoostQuery}.</li>
- * </ul>
- * <p>Indexing time boosts are pre-processed for storage efficiency and written to
- * storage for a field as follows:
- * <ul>
- * <li>All boosts of that field (i.e. all boosts under the same field name in that doc) are
- * multiplied.</li>
- * <li>The boost is then encoded into a normalization value by the Similarity
- * object at index-time: {@link org.apache.lucene.search.similarities.Similarity#computeNorm computeNorm()}.
- * The actual encoding depends upon the Similarity implementation, but note that most
- * use a lossy encoding (such as multiplying the boost with document length or similar, packed
- * into a single byte!).</li>
- * <li>Decoding of any index-time normalization values and integration into the document's score is also performed
- * at search time by the Similarity.</li>
- * </ul>
+ * <p>Lucene allows influencing the score contribution of various parts of the query by wrapping with
+ * {@link org.apache.lucene.search.BoostQuery}.</p>
*
* <a name="changingScoring"></a>
* <h2>Changing Scoring — Similarity</h2>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
index 6763118..74978fd 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
@@ -96,12 +96,12 @@ public class BM25Similarity extends Similarity {
}
}
- /** The default implementation encodes <code>boost / sqrt(length)</code>
+ /** The default implementation encodes <code>1 / sqrt(length)</code>
* with {@link SmallFloat#floatToByte315(float)}. This is compatible with
- * Lucene's default implementation. If you change this, then you should
- * change {@link #decodeNormValue(byte)} to match. */
- protected byte encodeNormValue(float boost, int fieldLength) {
- return SmallFloat.floatToByte315(boost / (float) Math.sqrt(fieldLength));
+ * Lucene's historic implementation: {@link ClassicSimilarity}. If you
+ * change this, then you should change {@link #decodeNormValue(byte)} to match. */
+ protected byte encodeNormValue(int fieldLength) {
+ return SmallFloat.floatToByte315((float) (1 / Math.sqrt(fieldLength)));
}
/** The default implementation returns <code>1 / f<sup>2</sup></code>
@@ -146,7 +146,7 @@ public class BM25Similarity extends Similarity {
@Override
public final long computeNorm(FieldInvertState state) {
final int numTerms = discountOverlaps ? state.getLength() - state.getNumOverlap() : state.getLength();
- return encodeNormValue(state.getBoost(), numTerms);
+ return encodeNormValue(numTerms);
}
/**
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
index 5a1e237..f56575f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
@@ -69,8 +69,7 @@ public class ClassicSimilarity extends TFIDFSimilarity {
* represent are rounded down to the largest representable value. Positive
* values too small to represent are rounded up to the smallest positive
* representable value.
- *
- * @see org.apache.lucene.document.Field#setBoost(float)
+ *
* @see org.apache.lucene.util.SmallFloat
*/
@Override
@@ -103,7 +102,7 @@ public class ClassicSimilarity extends TFIDFSimilarity {
numTerms = state.getLength() - state.getNumOverlap();
else
numTerms = state.getLength();
- return state.getBoost() * ((float) (1.0 / Math.sqrt(numTerms)));
+ return (float) (1.0 / Math.sqrt(numTerms));
}
/** Implemented as <code>sqrt(freq)</code>. */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
index 925dc59..dbf8d45 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
@@ -235,19 +235,19 @@ public abstract class SimilarityBase extends Similarity {
numTerms = state.getLength() - state.getNumOverlap();
else
numTerms = state.getLength();
- return encodeNormValue(state.getBoost(), numTerms);
+ return encodeNormValue(numTerms);
}
/** Decodes a normalization factor (document length) stored in an index.
- * @see #encodeNormValue(float,float)
+ * @see #encodeNormValue(float)
*/
protected float decodeNormValue(byte norm) {
return NORM_TABLE[norm & 0xFF]; // & 0xFF maps negative bytes to positive above 127
}
/** Encodes the length to a byte via SmallFloat. */
- protected byte encodeNormValue(float boost, float length) {
- return SmallFloat.floatToByte315((boost / (float) Math.sqrt(length)));
+ protected byte encodeNormValue(float length) {
+ return SmallFloat.floatToByte315((float) (1 / Math.sqrt(length)));
}
// ----------------------------- Static methods ------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
index 12ab1a2..2246561 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
@@ -369,49 +369,9 @@ import org.apache.lucene.util.BytesRef;
*
* <li>
* <A NAME="formula_norm"></A>
- * <b><i>norm(t,d)</i></b> encapsulates a few (indexing time) boost and length factors:
- *
- * <ul>
- * <li><b>Field boost</b> - set by calling
- * {@link org.apache.lucene.document.Field#setBoost(float) field.setBoost()}
- * before adding the field to a document.
- * </li>
- * <li><b>lengthNorm</b> - computed
- * when the document is added to the index in accordance with the number of tokens
- * of this field in the document, so that shorter fields contribute more to the score.
- * LengthNorm is computed by the Similarity class in effect at indexing.
- * </li>
- * </ul>
- * The {@link #computeNorm} method is responsible for
- * combining all of these factors into a single float.
- *
- * <p>
- * When a document is added to the index, all the above factors are multiplied.
- * If the document has multiple fields with the same name, all their boosts are multiplied together:
- *
- * <br> <br>
- * <table cellpadding="1" cellspacing="0" border="0" style="width:auto; margin-left:auto; margin-right:auto" summary="index-time normalization">
- * <tr>
- * <td valign="middle" align="right" rowspan="1">
- * norm(t,d) =
- * lengthNorm
- * ·
- * </td>
- * <td valign="bottom" align="center" rowspan="1" style="text-align: center">
- * <big><big><big>∏</big></big></big>
- * </td>
- * <td valign="middle" align="right" rowspan="1">
- * {@link org.apache.lucene.index.IndexableField#boost() f.boost}()
- * </td>
- * </tr>
- * <tr valign="top">
- * <td></td>
- * <td align="center" style="text-align: center"><small>field <i><b>f</b></i> in <i>d</i> named as <i><b>t</b></i></small></td>
- * <td></td>
- * </tr>
- * </table>
- * Note that search time is too late to modify this <i>norm</i> part of scoring,
- * e.g. by using a different {@link Similarity} for search.
+ * <b><i>norm(t,d)</i></b> is an index-time boost factor that solely
+ * depends on the number of tokens of this field in the document, so
+ * that shorter fields contribute more to the score.
* </li>
* </ol>
*
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/document/TestField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/document/TestField.java b/lucene/core/src/test/org/apache/lucene/document/TestField.java
index 4ef7ffb..c087505 100644
--- a/lucene/core/src/test/org/apache/lucene/document/TestField.java
+++ b/lucene/core/src/test/org/apache/lucene/document/TestField.java
@@ -38,7 +38,6 @@ public class TestField extends LuceneTestCase {
public void testDoublePoint() throws Exception {
Field field = new DoublePoint("foo", 5d);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -58,7 +57,6 @@ public class TestField extends LuceneTestCase {
public void testDoublePoint2D() throws Exception {
DoublePoint field = new DoublePoint("foo", 5d, 4d);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -84,7 +82,6 @@ public class TestField extends LuceneTestCase {
public void testDoubleDocValuesField() throws Exception {
DoubleDocValuesField field = new DoubleDocValuesField("foo", 5d);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -103,7 +100,6 @@ public class TestField extends LuceneTestCase {
public void testFloatDocValuesField() throws Exception {
FloatDocValuesField field = new FloatDocValuesField("foo", 5f);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -122,7 +118,6 @@ public class TestField extends LuceneTestCase {
public void testFloatPoint() throws Exception {
Field field = new FloatPoint("foo", 5f);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -142,7 +137,6 @@ public class TestField extends LuceneTestCase {
public void testFloatPoint2D() throws Exception {
FloatPoint field = new FloatPoint("foo", 5f, 4f);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -166,7 +160,6 @@ public class TestField extends LuceneTestCase {
public void testIntPoint() throws Exception {
Field field = new IntPoint("foo", 5);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -186,7 +179,6 @@ public class TestField extends LuceneTestCase {
public void testIntPoint2D() throws Exception {
IntPoint field = new IntPoint("foo", 5, 4);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -210,7 +202,6 @@ public class TestField extends LuceneTestCase {
public void testNumericDocValuesField() throws Exception {
NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -229,7 +220,6 @@ public class TestField extends LuceneTestCase {
public void testLongPoint() throws Exception {
Field field = new LongPoint("foo", 5);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -249,7 +239,6 @@ public class TestField extends LuceneTestCase {
public void testLongPoint2D() throws Exception {
LongPoint field = new LongPoint("foo", 5, 4);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -273,7 +262,6 @@ public class TestField extends LuceneTestCase {
public void testSortedBytesDocValuesField() throws Exception {
SortedDocValuesField field = new SortedDocValuesField("foo", new BytesRef("bar"));
- trySetBoost(field);
trySetByteValue(field);
field.setBytesValue("fubar".getBytes(StandardCharsets.UTF_8));
field.setBytesValue(new BytesRef("baz"));
@@ -292,7 +280,6 @@ public class TestField extends LuceneTestCase {
public void testBinaryDocValuesField() throws Exception {
BinaryDocValuesField field = new BinaryDocValuesField("foo", new BytesRef("bar"));
- trySetBoost(field);
trySetByteValue(field);
field.setBytesValue("fubar".getBytes(StandardCharsets.UTF_8));
field.setBytesValue(new BytesRef("baz"));
@@ -315,7 +302,6 @@ public class TestField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -339,7 +325,6 @@ public class TestField extends LuceneTestCase {
};
for (Field field : fields) {
- field.setBoost(5f);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -353,14 +338,12 @@ public class TestField extends LuceneTestCase {
field.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
assertEquals("baz", field.stringValue());
- assertEquals(5f, field.boost(), 0f);
}
}
public void testTextFieldReader() throws Exception {
Field field = new TextField("foo", new StringReader("bar"));
- field.setBoost(5f);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -374,7 +357,6 @@ public class TestField extends LuceneTestCase {
field.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
assertNotNull(field.readerValue());
- assertEquals(5f, field.boost(), 0f);
}
/* TODO: this is pretty expert and crazy
@@ -391,7 +373,6 @@ public class TestField extends LuceneTestCase {
};
for (Field field : fields) {
- trySetBoost(field);
trySetByteValue(field);
field.setBytesValue("baz".getBytes(StandardCharsets.UTF_8));
field.setBytesValue(new BytesRef("baz"));
@@ -410,7 +391,6 @@ public class TestField extends LuceneTestCase {
public void testStoredFieldString() throws Exception {
Field field = new StoredField("foo", "bar");
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -428,7 +408,6 @@ public class TestField extends LuceneTestCase {
public void testStoredFieldInt() throws Exception {
Field field = new StoredField("foo", 1);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -446,7 +425,6 @@ public class TestField extends LuceneTestCase {
public void testStoredFieldDouble() throws Exception {
Field field = new StoredField("foo", 1D);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -464,7 +442,6 @@ public class TestField extends LuceneTestCase {
public void testStoredFieldFloat() throws Exception {
Field field = new StoredField("foo", 1F);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -482,7 +459,6 @@ public class TestField extends LuceneTestCase {
public void testStoredFieldLong() throws Exception {
Field field = new StoredField("foo", 1L);
- trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
@@ -586,10 +562,4 @@ public class TestField extends LuceneTestCase {
});
}
- private void trySetBoost(Field f) {
- expectThrows(IllegalArgumentException.class, () -> {
- f.setBoost(5.0f);
- });
- }
-
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
index 65310cf..f193140 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
@@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@@ -53,9 +55,9 @@ public class TestCustomNorms extends LuceneTestCase {
int num = atLeast(100);
for (int i = 0; i < num; i++) {
Document doc = docs.nextDoc();
- float nextFloat = random().nextFloat();
- Field f = new TextField(floatTestField, "" + nextFloat, Field.Store.YES);
- f.setBoost(nextFloat);
+ int boost = TestUtil.nextInt(random(), 1, 10);
+ String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
+ Field f = new TextField(floatTestField, value, Field.Store.YES);
doc.add(f);
writer.addDocument(doc);
@@ -71,9 +73,9 @@ public class TestCustomNorms extends LuceneTestCase {
assertNotNull(norms);
for (int i = 0; i < open.maxDoc(); i++) {
Document document = open.document(i);
- float expected = Float.parseFloat(document.get(floatTestField));
+ int expected = Integer.parseInt(document.get(floatTestField).split(" ")[0]);
assertEquals(i, norms.nextDoc());
- assertEquals(expected, Float.intBitsToFloat((int)norms.longValue()), 0.0f);
+ assertEquals(expected, norms.longValue());
}
open.close();
dir.close();
@@ -97,7 +99,7 @@ public class TestCustomNorms extends LuceneTestCase {
@Override
public long computeNorm(FieldInvertState state) {
- return Float.floatToIntBits(state.getBoost());
+ return state.getLength();
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java b/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
index 977df3d..c5552a4 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
@@ -86,11 +86,6 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
lastSeen = reuse;
return lastReturned = new CannedTokenStream(new Token("unimportant", 0, 10));
}
-
- @Override
- public float boost() {
- return 1;
- }
@Override
public BytesRef binaryValue() {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index c2b180a..4275056 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -29,6 +29,8 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -1939,7 +1941,7 @@ public class TestIndexSorting extends LuceneTestCase {
@Override
public long computeNorm(FieldInvertState state) {
if (state.getName().equals("norms")) {
- return Float.floatToIntBits(state.getBoost());
+ return state.getLength();
} else {
return in.computeNorm(state);
}
@@ -2021,8 +2023,8 @@ public class TestIndexSorting extends LuceneTestCase {
positions.setId(id);
doc.add(new Field("positions", positions, POSITIONS_TYPE));
doc.add(new NumericDocValuesField("numeric", id));
- TextField norms = new TextField("norms", Integer.toString(id), Store.NO);
- norms.setBoost(Float.intBitsToFloat(id));
+ String value = IntStream.range(0, id).mapToObj(k -> Integer.toString(id)).collect(Collectors.joining(" "));
+ TextField norms = new TextField("norms", value, Store.NO);
doc.add(norms);
doc.add(new BinaryDocValuesField("binary", new BytesRef(Integer.toString(id))));
doc.add(new SortedDocValuesField("sorted", new BytesRef(Integer.toString(id))));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
index 6fdfca8..987852f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.FileNotFoundException;
import java.io.IOException;
-import java.io.Reader;
import java.io.StringReader;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
@@ -1687,71 +1686,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
uoe.doFail = false;
d.close();
}
-
- public void testBoostOmitNorms() throws Exception {
- Directory dir = newDirectory();
- IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
- iwc.setMergePolicy(newLogMergePolicy());
- IndexWriter iw = new IndexWriter(dir, iwc);
- Document doc = new Document();
- doc.add(new StringField("field1", "sometext", Field.Store.YES));
- doc.add(new TextField("field2", "sometext", Field.Store.NO));
- doc.add(new StringField("foo", "bar", Field.Store.NO));
- iw.addDocument(doc); // add an 'ok' document
- expectThrows(UnsupportedOperationException.class, () -> {
- // try to boost with norms omitted
- List<IndexableField> list = new ArrayList<>();
- list.add(new IndexableField() {
- @Override
- public String name() {
- return "foo";
- }
-
- @Override
- public IndexableFieldType fieldType() {
- return StringField.TYPE_NOT_STORED;
- }
-
- @Override
- public float boost() {
- return 5f;
- }
-
- @Override
- public BytesRef binaryValue() {
- return null;
- }
-
- @Override
- public String stringValue() {
- return "baz";
- }
-
- @Override
- public Reader readerValue() {
- return null;
- }
-
- @Override
- public Number numericValue() {
- return null;
- }
-
- @Override
- public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
- return null;
- }
- });
- iw.addDocument(list);
- });
-
- DirectoryReader ir = DirectoryReader.open(iw);
- assertEquals(1, ir.numDocs());
- assertEquals("sometext", ir.document(0).get("field1"));
- ir.close();
- iw.close();
- dir.close();
- }
// See LUCENE-4870 TooManyOpenFiles errors are thrown as
// FNFExceptions which can trigger data loss.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java
index 67edab9..e60a3e1 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java
@@ -111,11 +111,6 @@ public class TestIndexableField extends LuceneTestCase {
}
@Override
- public float boost() {
- return 1.0f + random().nextFloat();
- }
-
- @Override
public BytesRef binaryValue() {
if ((counter%10) == 3) {
final byte[] bytes = new byte[10];
@@ -339,11 +334,6 @@ public class TestIndexableField extends LuceneTestCase {
}
@Override
- public float boost() {
- return 1.0f;
- }
-
- @Override
public Number numericValue() {
return null;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
index 52038bc..363f57e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
@@ -19,6 +19,8 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@@ -117,7 +119,7 @@ public class TestNorms extends LuceneTestCase {
assertNotNull(normValues);
for (int i = 0; i < open.maxDoc(); i++) {
Document document = open.document(i);
- int expected = Integer.parseInt(document.get(byteTestField));
+ int expected = Integer.parseInt(document.get(byteTestField).split(" ")[0]);
assertEquals(i, normValues.nextDoc());
assertEquals(expected, normValues.longValue());
}
@@ -139,9 +141,9 @@ public class TestNorms extends LuceneTestCase {
int num = atLeast(100);
for (int i = 0; i < num; i++) {
Document doc = docs.nextDoc();
- int boost = random().nextInt(255);
- Field f = new TextField(byteTestField, "" + boost, Field.Store.YES);
- f.setBoost(boost);
+ int boost = TestUtil.nextInt(random, 1, 255);
+ String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
+ Field f = new TextField(byteTestField, value, Field.Store.YES);
doc.add(f);
writer.addDocument(doc);
doc.removeField(byteTestField);
@@ -173,8 +175,7 @@ public class TestNorms extends LuceneTestCase {
@Override
public long computeNorm(FieldInvertState state) {
- int boost = (int) state.getBoost();
- return (0xFF & boost);
+ return state.getLength();
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
index 3a13d9b..0deafdd 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
@@ -46,7 +46,7 @@ public class TestOmitTf extends LuceneTestCase {
public static class SimpleSimilarity extends TFIDFSimilarity {
@Override public float decodeNormValue(long norm) { return norm; }
@Override public long encodeNormValue(float f) { return (long) f; }
- @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
+ @Override public float lengthNorm(FieldInvertState state) { return 1; }
@Override public float tf(float freq) { return freq; }
@Override public float sloppyFreq(int distance) { return 2.0f; }
@Override public float idf(long docFreq, long docCount) { return 1.0f; }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
index 87046c7..e20163a 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
@@ -74,7 +74,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
@Override
public float lengthNorm(FieldInvertState state) {
// Disable length norm
- return state.getBoost();
+ return 1;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
deleted file mode 100644
index ecc4645..0000000
--- a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search;
-
-
-import java.io.IOException;
-
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.*;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-
-/** Document boost unit test.
- *
- *
- */
-public class TestDocBoost extends LuceneTestCase {
-
- public void testDocBoost() throws Exception {
- Directory store = newDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(random(), store, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
-
- Field f1 = newTextField("field", "word", Field.Store.YES);
- Field f2 = newTextField("field", "word", Field.Store.YES);
- f2.setBoost(2.0f);
-
- Document d1 = new Document();
- Document d2 = new Document();
-
- d1.add(f1); // boost = 1
- d2.add(f2); // boost = 2
-
- writer.addDocument(d1);
- writer.addDocument(d2);
-
- IndexReader reader = writer.getReader();
- writer.close();
-
- final float[] scores = new float[4];
-
- IndexSearcher searcher = newSearcher(reader);
- searcher.search
- (new TermQuery(new Term("field", "word")),
- new SimpleCollector() {
- private int base = 0;
- private Scorer scorer;
- @Override
- public void setScorer(Scorer scorer) {
- this.scorer = scorer;
- }
- @Override
- public final void collect(int doc) throws IOException {
- scores[doc + base] = scorer.score();
- }
- @Override
- protected void doSetNextReader(LeafReaderContext context) throws IOException {
- base = context.docBase;
- }
- @Override
- public boolean needsScores() {
- return true;
- }
- });
-
- float lastScore = 0.0f;
-
- for (int i = 0; i < 2; i++) {
- if (VERBOSE) {
- System.out.println(searcher.explain(new TermQuery(new Term("field", "word")), i));
- }
- if (scores[i] != 0.0) {
- assertTrue("score: " + scores[i] + " should be > lastScore: " + lastScore, scores[i] > lastScore);
- }
- lastScore = scores[i];
- }
-
- reader.close();
- store.close();
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java
index 8e9fb28..ee02eed 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java
@@ -45,9 +45,9 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
public void testQuery() throws Exception {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
- addDoc("one", iw, 1f);
- addDoc("two", iw, 20f);
- addDoc("three four", iw, 300f);
+ addDoc("one", iw);
+ addDoc("two", iw);
+ addDoc("three four", iw);
IndexReader ir = DirectoryReader.open(iw);
IndexSearcher is = newSearcher(ir);
@@ -92,10 +92,9 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
assertTrue(q1.equals(q2));
}
- private void addDoc(String text, IndexWriter iw, float boost) throws IOException {
+ private void addDoc(String text, IndexWriter iw) throws IOException {
Document doc = new Document();
Field f = newTextField("key", text, Field.Store.YES);
- f.setBoost(boost);
doc.add(f);
iw.addDocument(doc);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
index 966e5da..3faa5c2 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
@@ -39,7 +39,7 @@ import org.apache.lucene.document.Document;
public class TestSimilarity extends LuceneTestCase {
public static class SimpleSimilarity extends ClassicSimilarity {
- @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
+ @Override public float lengthNorm(FieldInvertState state) { return 1; }
@Override public float tf(float freq) { return freq; }
@Override public float sloppyFreq(int distance) { return 2.0f; }
@Override public float idf(long docFreq, long docCount) { return 1.0f; }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/similarities/TestBooleanSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/similarities/TestBooleanSimilarity.java b/lucene/core/src/test/org/apache/lucene/search/similarities/TestBooleanSimilarity.java
index 15b1448..23f65d3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/similarities/TestBooleanSimilarity.java
+++ b/lucene/core/src/test/org/apache/lucene/search/similarities/TestBooleanSimilarity.java
@@ -106,8 +106,7 @@ public class TestBooleanSimilarity extends LuceneTestCase {
final int length = TestUtil.nextInt(random(), 1, 100);
final int position = random().nextInt(length);
final int numOverlaps = random().nextInt(50);
- final float boost = random().nextFloat() * 10;
- FieldInvertState state = new FieldInvertState("foo", position, length, numOverlaps, 100, boost);
+ FieldInvertState state = new FieldInvertState("foo", position, length, numOverlaps, 100);
assertEquals(
sim2.computeNorm(state),
sim1.computeNorm(state),
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java b/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
index aec55de..373b9e6 100644
--- a/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
+++ b/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
@@ -593,7 +593,6 @@ public class TestSimilarityBase extends LuceneTestCase {
FieldInvertState state = new FieldInvertState("foo");
state.setLength(5);
state.setNumOverlap(2);
- state.setBoost(3);
assertEquals(expected.computeNorm(state), actual.computeNorm(state));
expected.setDiscountOverlaps(true);
actual.setDiscountOverlaps(true);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index 32b2d24..0c8ea6d 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -275,7 +275,7 @@ public class MemoryIndex {
throw new IllegalArgumentException("analyzer must not be null");
TokenStream stream = analyzer.tokenStream(fieldName, text);
- storeTerms(getInfo(fieldName, defaultFieldType), stream, 1.0f,
+ storeTerms(getInfo(fieldName, defaultFieldType), stream,
analyzer.getPositionIncrementGap(fieldName), analyzer.getOffsetGap(fieldName));
}
@@ -358,31 +358,7 @@ public class MemoryIndex {
}
};
}
-
- /**
- * Equivalent to <code>addField(fieldName, stream, 1.0f)</code>.
- *
- * @param fieldName
- * a name to be associated with the text
- * @param stream
- * the token stream to retrieve tokens from
- */
- public void addField(String fieldName, TokenStream stream) {
- addField(fieldName, stream, 1.0f);
- }
- /**
- * Adds a lucene {@link IndexableField} to the MemoryIndex using the provided analyzer.
- * Also stores doc values based on {@link IndexableFieldType#docValuesType()} if set.
- *
- * @param field the field to add
- * @param analyzer the analyzer to use for term analysis
- * @throws IllegalArgumentException if the field is a DocValues or Point field, as these
- * structures are not supported by MemoryIndex
- */
- public void addField(IndexableField field, Analyzer analyzer) {
- addField(field, analyzer, 1.0f);
- }
/**
* Adds a lucene {@link IndexableField} to the MemoryIndex using the provided analyzer.
@@ -390,9 +366,8 @@ public class MemoryIndex {
*
* @param field the field to add
* @param analyzer the analyzer to use for term analysis
- * @param boost a field boost
*/
- public void addField(IndexableField field, Analyzer analyzer, float boost) {
+ public void addField(IndexableField field, Analyzer analyzer) {
Info info = getInfo(field.name(), field.fieldType());
@@ -409,7 +384,7 @@ public class MemoryIndex {
positionIncrementGap = 0;
}
if (tokenStream != null) {
- storeTerms(info, tokenStream, boost, positionIncrementGap, offsetGap);
+ storeTerms(info, tokenStream, positionIncrementGap, offsetGap);
}
DocValuesType docValuesType = field.fieldType().docValuesType();
@@ -451,13 +426,9 @@ public class MemoryIndex {
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from.
- * @param boost
- * the boost factor for hits for this field
- *
- * @see org.apache.lucene.document.Field#setBoost(float)
*/
- public void addField(String fieldName, TokenStream stream, float boost) {
- addField(fieldName, stream, boost, 0);
+ public void addField(String fieldName, TokenStream stream) {
+ addField(fieldName, stream, 0);
}
@@ -472,17 +443,13 @@ public class MemoryIndex {
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from.
- * @param boost
- * the boost factor for hits for this field
*
* @param positionIncrementGap
* the position increment gap if fields with the same name are added more than once
*
- *
- * @see org.apache.lucene.document.Field#setBoost(float)
*/
- public void addField(String fieldName, TokenStream stream, float boost, int positionIncrementGap) {
- addField(fieldName, stream, boost, positionIncrementGap, 1);
+ public void addField(String fieldName, TokenStream stream, int positionIncrementGap) {
+ addField(fieldName, stream, positionIncrementGap, 1);
}
/**
@@ -497,17 +464,14 @@ public class MemoryIndex {
* a name to be associated with the text
* @param tokenStream
* the token stream to retrieve tokens from. It's guaranteed to be closed no matter what.
- * @param boost
- * the boost factor for hits for this field
* @param positionIncrementGap
* the position increment gap if fields with the same name are added more than once
* @param offsetGap
* the offset gap if fields with the same name are added more than once
- * @see org.apache.lucene.document.Field#setBoost(float)
*/
- public void addField(String fieldName, TokenStream tokenStream, float boost, int positionIncrementGap, int offsetGap) {
+ public void addField(String fieldName, TokenStream tokenStream, int positionIncrementGap, int offsetGap) {
Info info = getInfo(fieldName, defaultFieldType);
- storeTerms(info, tokenStream, boost, positionIncrementGap, offsetGap);
+ storeTerms(info, tokenStream, positionIncrementGap, offsetGap);
}
private Info getInfo(String fieldName, IndexableFieldType fieldType) {
@@ -600,20 +564,13 @@ public class MemoryIndex {
}
}
- private void storeTerms(Info info, TokenStream tokenStream, float boost, int positionIncrementGap, int offsetGap) {
-
- if (boost <= 0.0f) {
- throw new IllegalArgumentException("boost factor must be greater than 0.0");
- }
+ private void storeTerms(Info info, TokenStream tokenStream, int positionIncrementGap, int offsetGap) {
int pos = -1;
int offset = 0;
- if (info.numTokens == 0) {
- info.boost = boost;
- } else if (info.numTokens > 0) {
+ if (info.numTokens > 0) {
pos = info.lastPosition + positionIncrementGap;
offset = info.lastOffset + offsetGap;
- info.boost *= boost;
}
try (TokenStream stream = tokenStream) {
@@ -846,9 +803,6 @@ public class MemoryIndex {
/** Number of overlapping tokens for this field */
private int numOverlapTokens;
-
- /** Boost factor for hits for this field */
- private float boost;
private long sumTotalTermFreq;
@@ -939,7 +893,7 @@ public class MemoryIndex {
NumericDocValues getNormDocValues() {
if (norm == null) {
FieldInvertState invertState = new FieldInvertState(fieldInfo.name, fieldInfo.number,
- numTokens, numOverlapTokens, 0, boost);
+ numTokens, numOverlapTokens, 0);
final long value = normSimilarity.computeNorm(invertState);
if (DEBUG) System.err.println("MemoryIndexReader.norms: " + fieldInfo.name + ":" + value + ":" + numTokens);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
index 4e2189c..f34f30c 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
@@ -422,11 +422,11 @@ public class TestMemoryIndex extends LuceneTestCase {
}
}
- public void testPointValuesDoNotAffectBoostPositionsOrOffset() throws Exception {
+ public void testPointValuesDoNotAffectPositionsOrOffset() throws Exception {
MemoryIndex mi = new MemoryIndex(true, true);
- mi.addField(new TextField("text", "quick brown fox", Field.Store.NO), analyzer, 5f);
- mi.addField(new BinaryPoint("text", "quick".getBytes(StandardCharsets.UTF_8)), analyzer, 5f);
- mi.addField(new BinaryPoint("text", "brown".getBytes(StandardCharsets.UTF_8)), analyzer, 5f);
+ mi.addField(new TextField("text", "quick brown fox", Field.Store.NO), analyzer);
+ mi.addField(new BinaryPoint("text", "quick".getBytes(StandardCharsets.UTF_8)), analyzer);
+ mi.addField(new BinaryPoint("text", "brown".getBytes(StandardCharsets.UTF_8)), analyzer);
LeafReader leafReader = mi.createSearcher().getIndexReader().leaves().get(0).reader();
TermsEnum tenum = leafReader.terms("text").iterator();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
index 03c17a5..8a751b1 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
@@ -536,14 +536,13 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
MemoryIndex mi = new MemoryIndex(true, true);
MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
- mi.addField(new BinaryDocValuesField("text", new BytesRef("quick brown fox")), mockAnalyzer, 5f);
- mi.addField(new TextField("text", "quick brown fox", Field.Store.NO), mockAnalyzer, 5f);
+ mi.addField(new BinaryDocValuesField("text", new BytesRef("quick brown fox")), mockAnalyzer);
+ mi.addField(new TextField("text", "quick brown fox", Field.Store.NO), mockAnalyzer);
LeafReader leafReader = mi.createSearcher().getIndexReader().leaves().get(0).reader();
Document doc = new Document();
doc.add(new BinaryDocValuesField("text", new BytesRef("quick brown fox")));
Field field = new TextField("text", "quick brown fox", Field.Store.NO);
- field.setBoost(5f);
doc.add(field);
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), mockAnalyzer));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/misc/src/java/org/apache/lucene/document/LazyDocument.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/document/LazyDocument.java b/lucene/misc/src/java/org/apache/lucene/document/LazyDocument.java
index c1683a7..7fd8d98 100644
--- a/lucene/misc/src/java/org/apache/lucene/document/LazyDocument.java
+++ b/lucene/misc/src/java/org/apache/lucene/document/LazyDocument.java
@@ -162,11 +162,6 @@ public class LazyDocument {
}
@Override
- public float boost() {
- return 1.0f;
- }
-
- @Override
public BytesRef binaryValue() {
return getRealValue().binaryValue();
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java b/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
index 7eeeae0..9307b94 100644
--- a/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
+++ b/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
@@ -111,7 +111,7 @@ public class SweetSpotSimilarity extends ClassicSimilarity {
else
numTokens = state.getLength();
- return state.getBoost() * computeLengthNorm(numTokens);
+ return computeLengthNorm(numTokens);
}
/**
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java b/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
index b618654..cd6a819 100644
--- a/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
+++ b/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
@@ -47,7 +47,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
// base case, should degrade
FieldInvertState invertState = new FieldInvertState("bogus");
- invertState.setBoost(1.0f);
for (int i = 1; i < 1000; i++) {
invertState.setLength(i);
assertEquals("base case: i="+i,
@@ -108,7 +107,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
};
invertState = new FieldInvertState("foo");
- invertState.setBoost(1.0f);
for (int i = 3; i <=10; i++) {
invertState.setLength(i);
assertEquals("f: 3,10: spot i="+i,
@@ -129,7 +127,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
}
invertState = new FieldInvertState("bar");
- invertState.setBoost(1.0f);
for (int i = 8; i <=13; i++) {
invertState.setLength(i);
assertEquals("f: 8,13: spot i="+i,
@@ -139,7 +136,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
}
invertState = new FieldInvertState("yak");
- invertState.setBoost(1.0f);
for (int i = 6; i <=9; i++) {
invertState.setLength(i);
assertEquals("f: 6,9: spot i="+i,
@@ -149,7 +145,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
}
invertState = new FieldInvertState("bar");
- invertState.setBoost(1.0f);
for (int i = 13; i < 1000; i++) {
invertState.setLength(i-12);
final byte normD = computeAndGetNorm(d, invertState);
@@ -162,7 +157,6 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
}
invertState = new FieldInvertState("yak");
- invertState.setBoost(1.0f);
for (int i = 9; i < 1000; i++) {
invertState.setLength(i-8);
final byte normD = computeAndGetNorm(d, invertState);
@@ -179,11 +173,9 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
for (int i = 9; i < 1000; i++) {
invertState = new FieldInvertState("a");
- invertState.setBoost(1.0f);
invertState.setLength(i);
final byte normSS = computeAndGetNorm(sp, invertState);
invertState = new FieldInvertState("b");
- invertState.setBoost(1.0f);
invertState.setLength(i);
final byte normS = computeAndGetNorm(sp, invertState);
assertTrue("s: i="+i+" : a="+normSS+
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
index cfebc47..11060e5 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
@@ -135,7 +135,6 @@ class PreciseClassicSimilarity extends TFIDFSimilarity {
* values too small to represent are rounded up to the smallest positive
* representable value.
*
- * @see org.apache.lucene.document.Field#setBoost(float)
* @see org.apache.lucene.util.SmallFloat
*/
@Override
@@ -169,7 +168,7 @@ class PreciseClassicSimilarity extends TFIDFSimilarity {
} else {
numTerms = state.getLength();
}
- return state.getBoost() * ((float) (1.0 / Math.sqrt(numTerms)));
+ return (float) (1.0 / Math.sqrt(numTerms));
}
/** Implemented as <code>sqrt(freq)</code>. */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
index 188ef61..2afec44 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
@@ -267,7 +267,7 @@ public class TestPayloadScoreQuery extends LuceneTestCase {
//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@Override
public float lengthNorm(FieldInvertState state) {
- return state.getBoost();
+ return 1;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
index 9cc7067..da46a50 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
@@ -269,7 +269,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@Override
public float lengthNorm(FieldInvertState state) {
- return state.getBoost();
+ return 1;
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/lucene/test-framework/src/java/org/apache/lucene/search/BaseExplanationTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/BaseExplanationTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/search/BaseExplanationTestCase.java
index 2ea87f0..24e86e1 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/BaseExplanationTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/BaseExplanationTestCase.java
@@ -85,7 +85,6 @@ public abstract class BaseExplanationTestCase extends LuceneTestCase {
doc.add(newStringField(KEY, ""+index, Field.Store.NO));
doc.add(new SortedDocValuesField(KEY, new BytesRef(""+index)));
Field f = newTextField(FIELD, docFields[index], Field.Store.NO);
- f.setBoost(index);
doc.add(f);
doc.add(newTextField(ALTFIELD, docFields[index], Field.Store.NO));
return doc;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index fa02c39..152b344 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -45,6 +45,11 @@ Upgrading from Solr 6.x
* Deprecated collection and configset methods on MiniSolrCloudCluster have been
removed
+* Index-time boosts are not supported anymore. If any boosts are provided, they
+ will be ignored by the indexing chain. As a replacement, index-time scoring
+ factors should be indexed in a separate field and combined with the query
+ score using a function query.
+
Bug Fixes
----------------------
* SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
index 5152768..7d9e1c7 100644
--- a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
@@ -284,10 +284,10 @@ public class ICUCollationField extends FieldType {
}
@Override
- public List<IndexableField> createFields(SchemaField field, Object value, float boost) {
+ public List<IndexableField> createFields(SchemaField field, Object value) {
if (field.hasDocValues()) {
List<IndexableField> fields = new ArrayList<>();
- fields.add(createField(field, value, boost));
+ fields.add(createField(field, value));
final BytesRef bytes = getCollationKey(field.getName(), value.toString());
if (field.multiValued()) {
fields.add(new SortedSetDocValuesField(field.getName(), bytes));
@@ -296,7 +296,7 @@ public class ICUCollationField extends FieldType {
}
return fields;
} else {
- return Collections.singletonList(createField(field, value, boost));
+ return Collections.singletonList(createField(field, value));
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index c80d275..a3d4756 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -51,6 +51,7 @@ import java.util.concurrent.atomic.AtomicLong;
public class DocBuilder {
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
private static final Date EPOCH = new Date(0);
public static final String DELETE_DOC_BY_ID = "$deleteDocById";
@@ -617,13 +618,12 @@ public class DocBuilder {
}
value = arow.get(DOC_BOOST);
if (value != null) {
- float value1 = 1.0f;
- if (value instanceof Number) {
- value1 = ((Number) value).floatValue();
+ String message = "Ignoring document boost: " + value + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ LOG.warn(message);
} else {
- value1 = Float.parseFloat(value.toString());
+ LOG.debug(message);
}
- doc.setDocumentBoost(value1);
}
value = arow.get(SKIP_DOC);
@@ -659,7 +659,7 @@ public class DocBuilder {
sf = config.getSchemaField(key);
}
if (sf != null) {
- addFieldToDoc(entry.getValue(), sf.getName(), 1.0f, sf.multiValued(), doc);
+ addFieldToDoc(entry.getValue(), sf.getName(), sf.multiValued(), doc);
}
//else do nothing. if we add it it may fail
} else {
@@ -679,7 +679,7 @@ public class DocBuilder {
}
}
if (toWrite) {
- addFieldToDoc(entry.getValue(), name, f.getBoost(), multiValued, doc);
+ addFieldToDoc(entry.getValue(), name, multiValued, doc);
}
}
}
@@ -687,30 +687,30 @@ public class DocBuilder {
}
}
- private void addFieldToDoc(Object value, String name, float boost, boolean multiValued, DocWrapper doc) {
+ private void addFieldToDoc(Object value, String name, boolean multiValued, DocWrapper doc) {
if (value instanceof Collection) {
Collection collection = (Collection) value;
if (multiValued) {
for (Object o : collection) {
if (o != null)
- doc.addField(name, o, boost);
+ doc.addField(name, o);
}
} else {
if (doc.getField(name) == null)
for (Object o : collection) {
if (o != null) {
- doc.addField(name, o, boost);
+ doc.addField(name, o);
break;
}
}
}
} else if (multiValued) {
if (value != null) {
- doc.addField(name, value, boost);
+ doc.addField(name, value);
}
} else {
if (doc.getField(name) == null && value != null)
- doc.addField(name, value, boost);
+ doc.addField(name, value);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/EntityField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/EntityField.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/EntityField.java
index b61198d..2b28cb7 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/EntityField.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/EntityField.java
@@ -30,7 +30,6 @@ import org.w3c.dom.Element;
public class EntityField {
private final String column;
private final String name;
- private final float boost;
private final boolean toWrite;
private final boolean multiValued;
private final boolean dynamicName;
@@ -40,7 +39,6 @@ public class EntityField {
public EntityField(Builder b) {
this.column = b.column;
this.name = b.name;
- this.boost = b.boost;
this.toWrite = b.toWrite;
this.multiValued = b.multiValued;
this.dynamicName = b.dynamicName;
@@ -60,10 +58,6 @@ public class EntityField {
return column;
}
- public float getBoost() {
- return boost;
- }
-
public boolean isToWrite() {
return toWrite;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingParams.java
----------------------------------------------------------------------
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingParams.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingParams.java
index 6b587f1..f7917bb 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingParams.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingParams.java
@@ -47,19 +47,6 @@ public interface ExtractingParams {
public static final String MAP_PREFIX = "fmap.";
/**
- * The boost value for the name of the field. The boost can be specified by a name mapping.
- * <p>
- * For example
- * <pre>
- * map.title=solr.title
- * boost.solr.title=2.5
- * </pre>
- * will boost the solr.title field for this document by 2.5
- *
- */
- public static final String BOOST_PREFIX = "boost.";
-
- /**
* Pass in literal values to be added to the document, as in
* <pre>
* literal.myField=Foo
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
----------------------------------------------------------------------
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
index 7779451..9268f1c 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
@@ -252,15 +252,13 @@ public class SolrContentHandler extends DefaultHandler implements ExtractingPara
vals=null;
}
- float boost = getBoost(name);
-
if (fval != null) {
- document.addField(name, transformValue(fval, sf), boost);
+ document.addField(name, transformValue(fval, sf));
}
if (vals != null) {
for (String val : vals) {
- document.addField(name, transformValue(val, sf), boost);
+ document.addField(name, transformValue(val, sf));
}
}
@@ -336,17 +334,6 @@ public class SolrContentHandler extends DefaultHandler implements ExtractingPara
return result;
}
-
- /**
- * Get the value of any boost factor for the mapped name.
- *
- * @param name The name of the field to see if there is a boost specified
- * @return The boost value
- */
- protected float getBoost(String name) {
- return params.getFloat(BOOST_PREFIX + name, 1.0f);
- }
-
/**
* Get the name mapping
*
[30/50] [abbrv] lucene-solr:jira/solr-6736: Add 6.4.2 back compat
test indexes
Posted by is...@apache.org.
Add 6.4.2 back compat test indexes
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3a993396
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3a993396
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3a993396
Branch: refs/heads/jira/solr-6736
Commit: 3a9933960ec35e3083f261549dfed0e75fd8268c
Parents: 57e8543
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Wed Mar 8 01:21:45 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 01:21:45 2017 +0530
----------------------------------------------------------------------
.../lucene/index/TestBackwardsCompatibility.java | 4 +++-
.../org/apache/lucene/index/index.6.4.2-cfs.zip | Bin 0 -> 15856 bytes
.../org/apache/lucene/index/index.6.4.2-nocfs.zip | Bin 0 -> 15886 bytes
3 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 57ce52a..1dda6b6 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -295,7 +295,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
"6.4.0-cfs",
"6.4.0-nocfs",
"6.4.1-cfs",
- "6.4.1-nocfs"
+ "6.4.1-nocfs",
+ "6.4.2-cfs",
+ "6.4.2-nocfs"
};
final String[] unsupportedNames = {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip
new file mode 100644
index 0000000..eee89f4
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip differ
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip
new file mode 100644
index 0000000..d55a6f6
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip differ
[22/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9986: Implement
DatePointField
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index 5c8e361..8195c05 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@ -122,9 +122,15 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
);
// length of date math caused issues...
- assertJQ(req("q", "foo_dt:\"2013-03-08T00:46:15Z/DAY+000MILLISECONDS+00SECONDS+00MINUTES+00HOURS+0000000000YEARS+6MONTHS+3DAYS\"", "debug", "query")
- , "/debug/parsedquery=='foo_dt:2013-09-11T00:00:00Z'"
- );
+ if (h.getCore().getLatestSchema().getField("foo_dt").getType().isPointField()) {
+ assertJQ(req("q", "foo_dt:\"2013-03-08T00:46:15Z/DAY+000MILLISECONDS+00SECONDS+00MINUTES+00HOURS+0000000000YEARS+6MONTHS+3DAYS\"", "debug", "query")
+ , "/debug/parsedquery=='IndexOrDocValuesQuery(foo_dt:[1378857600000 TO 1378857600000])'"
+ );
+ } else {
+ assertJQ(req("q", "foo_dt:\"2013-03-08T00:46:15Z/DAY+000MILLISECONDS+00SECONDS+00MINUTES+00HOURS+0000000000YEARS+6MONTHS+3DAYS\"", "debug", "query")
+ , "/debug/parsedquery=='foo_dt:2013-09-11T00:00:00Z'"
+ );
+ }
}
@Test
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
index 28fdab3..7bae2c9 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
@@ -552,7 +552,12 @@ public class AtomicUpdatesTest extends SolrTestCaseJ4 {
assertU(commit());
- assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ boolean isPointField = h.getCore().getLatestSchema().getField("dateRemove").getType().isPointField();
+ if (isPointField) {
+ assertQ(req("q", "dateRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
+ } else {
+ assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ }
assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
doc = new SolrInputDocument();
@@ -565,7 +570,11 @@ public class AtomicUpdatesTest extends SolrTestCaseJ4 {
assertU(adoc(doc));
assertU(commit());
- assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ if (isPointField) {
+ assertQ(req("q", "dateRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
+ } else {
+ assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ }
assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']");
doc = new SolrInputDocument();
@@ -577,7 +586,11 @@ public class AtomicUpdatesTest extends SolrTestCaseJ4 {
assertU(adoc(doc));
assertU(commit());
- assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ if (isPointField) {
+ assertQ(req("q", "dateRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
+ } else {
+ assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ }
assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '1']");
doc = new SolrInputDocument();
@@ -587,7 +600,11 @@ public class AtomicUpdatesTest extends SolrTestCaseJ4 {
assertU(adoc(doc));
assertU(commit());
- assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ if (isPointField) {
+ assertQ(req("q", "dateRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
+ } else {
+ assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
+ }
assertQ(req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
index b779f7a..31f4760 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
@@ -18,6 +18,7 @@ package org.apache.solr.update.processor;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.PointField;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
@@ -148,9 +149,15 @@ public class ParsingFieldUpdateProcessorsTest extends UpdateProcessorTestBase {
String dateString = "2010-11-12T13:14:15.168Z";
DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime();
DateTime dateTime = dateTimeFormatter.parseDateTime(dateString);
+ SolrInputDocument d;
+ if (schema.getField("date_dt").getType().isPointField()) {
+ d = processAdd("parse-date-explicit-typeclass-point-selector-no-run-processor",
+ doc(f("id", "77"), f("date_dt", dateString)));
+ } else {
+ d = processAdd("parse-date-explicit-typeclass-selector-no-run-processor",
+ doc(f("id", "77"), f("date_dt", dateString)));
+ }
- SolrInputDocument d = processAdd("parse-date-explicit-typeclass-selector-no-run-processor",
- doc(f("id", "77"), f("date_dt", dateString)));
assertNotNull(d);
assertTrue(d.getFieldValue("date_dt") instanceof Date);
assertEquals(dateTime.getMillis(), ((Date)d.getFieldValue("date_dt")).getTime());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index c3c269c..a8c93d6 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -515,12 +515,14 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
System.setProperty("solr.tests.longClass", "long");
System.setProperty("solr.tests.doubleClass", "double");
System.setProperty("solr.tests.floatClass", "float");
+ System.setProperty("solr.tests.dateClass", "date");
} else {
log.info("Using PointFields");
System.setProperty("solr.tests.intClass", "pint");
System.setProperty("solr.tests.longClass", "plong");
System.setProperty("solr.tests.doubleClass", "pdouble");
System.setProperty("solr.tests.floatClass", "pfloat");
+ System.setProperty("solr.tests.dateClass", "pdate");
}
}
[15/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10088: Installer
script does not put zoo.cfg in SOLR_HOME
Posted by is...@apache.org.
SOLR-10088: Installer script does not put zoo.cfg in SOLR_HOME
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/59433bb4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/59433bb4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/59433bb4
Branch: refs/heads/jira/solr-6736
Commit: 59433bb460c2e55756cb3cea2aecd264e48e3b3a
Parents: 5ccc8e7
Author: Jan Høydahl <ja...@apache.org>
Authored: Sat Mar 4 23:19:06 2017 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sat Mar 4 23:19:06 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 18 ++++++++++--------
solr/bin/install_solr_service.sh | 2 +-
2 files changed, 11 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/59433bb4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2ffcc46..ea56cbb 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -128,7 +128,7 @@ New Features
numeric, and conditional evaluators. BooleanOperations have been removed in preference of
BooleanEvaluators. (Dennis Gove)
-* SOLR-9903: Stop interrupting the update executor on shutdown, it can cause graceful shutdowns to put replicas into Leader
+* SOLR-9903: Stop interrupting the update executor on shutdown, it can cause graceful shutdowns to put replicas into Leader
Initiated Recovery among other undesirable things. (Mark Miller)
* SOLR-8396: Add support for PointFields in Solr (Ishan Chattopadhyaya, Tomás Fernández Löbbe)
@@ -203,6 +203,8 @@ Bug Fixes
* SOLR-10225: Fix HDFS BlockCache evictions metric to not count explicit removal
due to a directory close. (yonik)
+* SOLR-10088: Installer script does not put zoo.cfg in SOLR_HOME (janhoy)
+
Optimizations
----------------------
@@ -232,9 +234,9 @@ Other Changes
* SOLR-10018: Increase the default hl.maxAnalyzedChars to 51200 for the Unified & Postings Highlighter so that all
highlighters now have this same default. (David Smiley)
-
-* SOLR-6246: Added tests to check that the changes in LUCENE-7564 and LUCENE-7670
- enable AnalyzingInfixSuggester and BlendedInfixSuggester to play nicely with core reload.
+
+* SOLR-6246: Added tests to check that the changes in LUCENE-7564 and LUCENE-7670
+ enable AnalyzingInfixSuggester and BlendedInfixSuggester to play nicely with core reload.
SolrSuggester.build() now throws SolrCoreState.CoreIsClosedException when interrupted
by a core reload/shutdown. (Steve Rowe)
@@ -261,8 +263,8 @@ Other Changes
* SOLR-9842: UpdateRequestProcessors have no way to guarantee the closing of resources used for a request.
(Mark Miller)
-* SOLR-9848: Lower solr.cloud.wait-for-updates-with-stale-state-pause back down from 7 seconds.
- (Mark Miller)
+* SOLR-9848: Lower solr.cloud.wait-for-updates-with-stale-state-pause back down from 7 seconds.
+ (Mark Miller)
* SOLR-10020: Cannot reload a core if it fails initialization. (Mike Drob via Erick Erickson)
@@ -273,8 +275,8 @@ Other Changes
* SOLR-10214: Remove unused HDFS BlockCache metrics and add storeFails, as well as adding total
counts for lookups, hits, and evictions. (yonik)
-
-* SOLR-10134: EmbeddedSolrServer responds on Schema API requests (Robert Alexandersson via Mikhail Khludnev)
+
+* SOLR-10134: EmbeddedSolrServer responds on Schema API requests (Robert Alexandersson via Mikhail Khludnev)
* SOLR-10219: re-enable HDFS tests under JDK9 (hossman, Uwe Schindler)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/59433bb4/solr/bin/install_solr_service.sh
----------------------------------------------------------------------
diff --git a/solr/bin/install_solr_service.sh b/solr/bin/install_solr_service.sh
index b331870..f42dd5a 100755
--- a/solr/bin/install_solr_service.sh
+++ b/solr/bin/install_solr_service.sh
@@ -340,7 +340,7 @@ mkdir -p "$SOLR_VAR_DIR/logs"
if [ -f "$SOLR_VAR_DIR/data/solr.xml" ]; then
echo -e "\n$SOLR_VAR_DIR/data/solr.xml already exists. Skipping install ...\n"
else
- cp "$SOLR_INSTALL_DIR/server/solr/solr.xml" "$SOLR_VAR_DIR/data/solr.xml"
+ cp "$SOLR_INSTALL_DIR/server/solr/"{solr.xml,zoo.cfg} "$SOLR_VAR_DIR/data/"
fi
if [ -f "$SOLR_VAR_DIR/log4j.properties" ]; then
echo -e "\n$SOLR_VAR_DIR/log4j.properties already exists. Skipping install ...\n"
[27/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10226 JMX metric
avgTimePerRequest broken.
Posted by is...@apache.org.
SOLR-10226 JMX metric avgTimePerRequest broken.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2d51a42d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2d51a42d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2d51a42d
Branch: refs/heads/jira/solr-6736
Commit: 2d51a42d3cae3eddc89f407cd3611fa2cd5d55d0
Parents: 190f4b6
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Mar 7 17:59:57 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Mar 7 18:09:58 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 6 ++++++
.../src/java/org/apache/solr/handler/RequestHandlerBase.java | 7 ++++++-
.../core/src/java/org/apache/solr/util/stats/MetricUtils.java | 2 +-
3 files changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index db721da..5b0eb03 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -96,6 +96,10 @@ Detailed Change List
Upgrade Notes
----------------------
+* SOLR-10226: JMX metric "avgTimePerRequest" (and the corresponding metric in the metrics API for
+ each handler) used to be a simple non-decaying average based on total cumulative time and the
+ number of requests. New Codahale Metrics implementation applies exponential decay to this value,
+ which heavily biases the average towards the last 5 minutes. (ab)
New Features
----------------------
@@ -212,6 +216,8 @@ Bug Fixes
* SOLR-10088: Installer script does not put zoo.cfg in SOLR_HOME (janhoy)
+* SOLR-10226: add back "totalTime" metric to all handlers. See also the back-compat note. (ab)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 3c6f5fa..1958e11 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -66,6 +66,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
private Meter numTimeouts = new Meter();
private Counter requests = new Counter();
private Timer requestTimes = new Timer();
+ private Counter totalTime = new Counter();
private final long handlerStart;
@@ -143,6 +144,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
numTimeouts = manager.meter(registryName, "timeouts", getCategory().toString(), scope);
requests = manager.counter(registryName, "requests", getCategory().toString(), scope);
requestTimes = manager.timer(registryName, "requestTimes", getCategory().toString(), scope);
+ totalTime = manager.counter(registryName, "totalTime", getCategory().toString(), scope);
}
public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) {
@@ -209,7 +211,8 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
}
}
} finally {
- timer.stop();
+ long elapsed = timer.stop();
+ totalTime.inc(elapsed);
}
}
@@ -292,6 +295,8 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
lst.add("serverErrors", numServerErrors.getCount());
lst.add("clientErrors", numClientErrors.getCount());
lst.add("timeouts", numTimeouts.getCount());
+ // convert totalTime to ms
+ lst.add("totalTime", MetricUtils.nsToMs(totalTime.getCount()));
MetricUtils.addMetrics(lst, requestTimes);
return lst;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 0d386ae..80f035b 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -63,7 +63,7 @@ public class MetricUtils {
* @param ns the amount of time in nanoseconds
* @return the amount of time in milliseconds
*/
- static double nsToMs(double ns) {
+ public static double nsToMs(double ns) {
return ns / TimeUnit.MILLISECONDS.toNanos(1);
}
[13/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9401:
TestPKIAuthenticationPlugin NPE. do the time consuming pub key creation
before header is set
Posted by is...@apache.org.
SOLR-9401: TestPKIAuthenticationPlugin NPE. do the time consuming pub key creation before header is set
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b66d1339
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b66d1339
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b66d1339
Branch: refs/heads/jira/solr-6736
Commit: b66d13398aef416ec6b64dd5d3e5c00219ae5ce4
Parents: 5ae51d4
Author: Noble Paul <no...@apache.org>
Authored: Sat Mar 4 16:12:57 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Sat Mar 4 16:12:57 2017 +1030
----------------------------------------------------------------------
.../security/TestPKIAuthenticationPlugin.java | 131 +++++++------------
1 file changed, 47 insertions(+), 84 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b66d1339/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
index 4eb0a80..2595277 100644
--- a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
@@ -38,18 +38,10 @@ import org.apache.solr.util.CryptoKeys;
import static org.mockito.Mockito.*;
public class TestPKIAuthenticationPlugin extends SolrTestCaseJ4 {
- HttpServletRequest mockReq;
- FilterChain filterChain;
- final AtomicReference<ServletRequest> wrappedRequestByFilter = new AtomicReference<>();
- final AtomicReference<Header> header = new AtomicReference<>();
- AtomicReference<Principal> principal = new AtomicReference<>();
- BasicHttpRequest request;
-
static class MockPKIAuthenticationPlugin extends PKIAuthenticationPlugin {
SolrRequestInfo solrRequestInfo;
-
Map<String, PublicKey> remoteKeys = new HashMap<>();
public MockPKIAuthenticationPlugin(CoreContainer cores, String node) {
@@ -78,6 +70,7 @@ public class TestPKIAuthenticationPlugin extends SolrTestCaseJ4 {
}
public void test() throws Exception {
+ AtomicReference<Principal> principal = new AtomicReference<>();
String nodeName = "node_x_233";
final MockPKIAuthenticationPlugin mock = new MockPKIAuthenticationPlugin(null, nodeName);
@@ -92,96 +85,66 @@ public class TestPKIAuthenticationPlugin extends SolrTestCaseJ4 {
principal.set(new BasicUserPrincipal("solr"));
mock.solrRequestInfo = new SolrRequestInfo(localSolrQueryRequest, new SolrQueryResponse());
- request = new BasicHttpRequest("GET", "http://localhost:56565");
+ BasicHttpRequest request = new BasicHttpRequest("GET", "http://localhost:56565");
mock.setHeader(request);
+ final AtomicReference<Header> header = new AtomicReference<>();
header.set(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
assertNotNull(header.get());
assertTrue(header.get().getValue().startsWith(nodeName));
- mockReq = createMockRequest(header);
- filterChain = (servletRequest, servletResponse) -> wrappedRequestByFilter.set(servletRequest);
-
-
- run("solr", () -> {
- mock.doAuthenticate(mockReq, null, filterChain);
- });
+ final AtomicReference<ServletRequest> wrappedRequestByFilter = new AtomicReference<>();
+ HttpServletRequest mockReq = createMockRequest(header);
+ FilterChain filterChain = (servletRequest, servletResponse) -> wrappedRequestByFilter.set(servletRequest);
+ mock.doAuthenticate(mockReq, null, filterChain);
+ assertNotNull(wrappedRequestByFilter.get());
+ assertEquals("solr", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName());
//test 2
-
- run(null, () -> {
- principal.set(null); // no user
- header.set(null);
- wrappedRequestByFilter.set(null);//
- request = new BasicHttpRequest("GET", "http://localhost:56565");
- mock.setHeader(request);
- assertNull(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
- mock.doAuthenticate(mockReq, null, filterChain);
- });
+ principal.set(null); // no user
+ header.set(null);
+ wrappedRequestByFilter.set(null);//
+ request = new BasicHttpRequest("GET", "http://localhost:56565");
+ mock.setHeader(request);
+ assertNull(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
+ mock.doAuthenticate(mockReq, null, filterChain);
+ assertNotNull(wrappedRequestByFilter.get());
+ assertNull(((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal());
//test 3 . No user request . Request originated from Solr
- run("$", () -> {
- mock.solrRequestInfo = null;
- header.set(null);
- wrappedRequestByFilter.set(null);
- request = new BasicHttpRequest("GET", "http://localhost:56565");
- mock.setHeader(request);
- header.set(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
- assertNotNull(header.get());
- assertTrue(header.get().getValue().startsWith(nodeName));
- mock.doAuthenticate(mockReq, null, filterChain);
- });
-
- run("$", () -> {
- mock.solrRequestInfo = null;
- header.set(null);
- wrappedRequestByFilter.set(null);
- request = new BasicHttpRequest("GET", "http://localhost:56565");
- mock.setHeader(request);
- header.set(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
- assertNotNull(header.get());
- assertTrue(header.get().getValue().startsWith(nodeName));
- MockPKIAuthenticationPlugin mock1 = new MockPKIAuthenticationPlugin(null, nodeName) {
- int called = 0;
-
- @Override
- PublicKey getRemotePublicKey(String nodename) {
- try {
- return called == 0 ? new CryptoKeys.RSAKeyPair().getPublicKey() : correctKey;
- } finally {
- called++;
- }
- }
- };
-
- mock1.doAuthenticate(mockReq, null, filterChain);
-
- });
-
- }
+ //create pub key in advance because it can take time and it should be
+ //created before the header is set
+ PublicKey key = new CryptoKeys.RSAKeyPair().getPublicKey();
+ mock.solrRequestInfo = null;
+ header.set(null);
+ wrappedRequestByFilter.set(null);
+ request = new BasicHttpRequest("GET", "http://localhost:56565");
+ mock.setHeader(request);
+ header.set(request.getFirstHeader(PKIAuthenticationPlugin.HEADER));
+ assertNotNull(header.get());
+ assertTrue(header.get().getValue().startsWith(nodeName));
- interface Runnable {
- void run() throws Exception;
- }
+ mock.doAuthenticate(mockReq, null, filterChain);
+ assertNotNull(wrappedRequestByFilter.get());
+ assertEquals("$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName());
- private void run(String expected, Runnable r) throws Exception {
- int failures = 0;
- for (; ; ) {
- r.run();
- if (expected == null) {
- assertTrue(wrappedRequestByFilter.get() == null || ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal() == null);
- } else {
- assertNotNull(wrappedRequestByFilter.get());
- if (((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal() == null) {
- //may be timed out
- if (++failures < 3) continue;
- else
- fail("No principal obtained");
+ /*test4 mock the restart of a node*/
+ MockPKIAuthenticationPlugin mock1 = new MockPKIAuthenticationPlugin(null, nodeName) {
+ int called = 0;
+ @Override
+ PublicKey getRemotePublicKey(String nodename) {
+ try {
+ return called == 0 ? key : correctKey;
+ } finally {
+ called++;
}
- assertEquals(expected, ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName());
}
- return;
+ };
+
+ mock1.doAuthenticate(mockReq, null,filterChain );
+ assertNotNull(wrappedRequestByFilter.get());
+ assertEquals("$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName());
+
- }
}
private HttpServletRequest createMockRequest(final AtomicReference<Header> header) {
[02/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-6819: Remove
index-time boosts.
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java b/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java
index c2e8b2e..ca48f78 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java
@@ -149,7 +149,7 @@ public class TestSchemalessBufferedUpdates extends SolrTestCaseJ4 {
// Non-JSON types (Date in this case) aren't handled properly in noggit-0.6. Although this is fixed in
// https://github.com/yonik/noggit/commit/ec3e732af7c9425e8f40297463cbe294154682b1 to call obj.toString(),
// Date::toString produces a Date representation that Solr doesn't like, so we convert using Instant::toString
- cmd.solrDoc.get("f_dt").setValue(((Date) cmd.solrDoc.get("f_dt").getValue()).toInstant().toString(), 1.0f);
+ cmd.solrDoc.get("f_dt").setValue(((Date) cmd.solrDoc.get("f_dt").getValue()).toInstant().toString());
}
return cmd.solrDoc;
} finally {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
index 8c65b58..92f2863 100644
--- a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
+++ b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
@@ -432,7 +432,6 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
"//float[@name='score']='" + similarity.tf(5) + "'");
FieldInvertState state = new FieldInvertState("a_tfidf");
- state.setBoost(1.0f);
state.setLength(4);
long norm = similarity.computeNorm(state);
float nrm = similarity.decodeNormValue((byte) norm);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
index 2c0b634..2a78d6b 100644
--- a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
@@ -16,30 +16,15 @@
*/
package org.apache.solr.update;
-import java.util.List;
-
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
-import org.apache.solr.common.params.CommonParams;
import org.apache.solr.core.SolrCore;
-import org.apache.solr.index.SlowCompositeReaderWrapper;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.ResultContext;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.CopyField;
import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.search.DocList;
-import org.apache.solr.search.SolrIndexSearcher;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -62,7 +47,7 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
// undefined field
try {
SolrInputDocument doc = new SolrInputDocument();
- doc.setField( "unknown field", 12345, 1.0f );
+ doc.setField( "unknown field", 12345 );
DocumentBuilder.toDocument( doc, core.getLatestSchema() );
fail( "should throw an error" );
}
@@ -78,7 +63,7 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
// make sure a null value is not indexed
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "name", null, 1.0f );
+ doc.addField( "name", null );
Document out = DocumentBuilder.toDocument( doc, core.getLatestSchema() );
assertNull( out.get( "name" ) );
}
@@ -90,8 +75,8 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
// make sure a null value is not indexed
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "id", "123", 1.0f );
- doc.addField( "unknown", "something", 1.0f );
+ doc.addField( "id", "123" );
+ doc.addField( "unknown", "something" );
try {
DocumentBuilder.toDocument( doc, core.getLatestSchema() );
fail( "added an unknown field" );
@@ -102,7 +87,7 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
doc.remove( "unknown" );
- doc.addField( "weight", "not a number", 1.0f );
+ doc.addField( "weight", "not a number" );
try {
DocumentBuilder.toDocument( doc, core.getLatestSchema() );
fail( "invalid 'float' field value" );
@@ -113,7 +98,7 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
}
// now make sure it is OK
- doc.setField( "weight", "1.34", 1.0f );
+ doc.setField( "weight", "1.34" );
DocumentBuilder.toDocument( doc, core.getLatestSchema() );
}
@@ -123,95 +108,15 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
// make sure a null value is not indexed
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "home", "2.2,3.3", 1.0f );
+ doc.addField( "home", "2.2,3.3" );
Document out = DocumentBuilder.toDocument( doc, core.getLatestSchema() );
assertNotNull( out.get( "home" ) );//contains the stored value and term vector, if there is one
assertNotNull( out.getField( "home_0" + FieldType.POLY_FIELD_SEPARATOR + "double" ) );
assertNotNull( out.getField( "home_1" + FieldType.POLY_FIELD_SEPARATOR + "double" ) );
}
- @Test
- public void testCopyFieldWithDocumentBoost() {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- assertFalse(schema.getField("title").omitNorms());
- assertTrue(schema.getField("title_stringNoNorms").omitNorms());
- SolrInputDocument doc = new SolrInputDocument();
- doc.setDocumentBoost(3f);
- doc.addField( "title", "mytitle");
- Document out = DocumentBuilder.toDocument( doc, schema );
- assertNotNull( out.get( "title_stringNoNorms" ) );
- assertTrue("title_stringNoNorms has the omitNorms attribute set to true, if the boost is different than 1.0, it will fail",1.0f == out.getField( "title_stringNoNorms" ).boost() );
- assertTrue("It is OK that title has a boost of 3",3.0f == out.getField( "title" ).boost() );
- }
-
-
- @Test
- public void testCopyFieldWithFieldBoost() {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- assertFalse(schema.getField("title").omitNorms());
- assertTrue(schema.getField("title_stringNoNorms").omitNorms());
- SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "title", "mytitle", 3.0f );
- Document out = DocumentBuilder.toDocument( doc, schema );
- assertNotNull( out.get( "title_stringNoNorms" ) );
- assertTrue("title_stringNoNorms has the omitNorms attribute set to true, if the boost is different than 1.0, it will fail",1.0f == out.getField( "title_stringNoNorms" ).boost() );
- assertTrue("It is OK that title has a boost of 3",3.0f == out.getField( "title" ).boost() );
- }
-
- @Test
- public void testWithPolyFieldsAndFieldBoost() {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- assertFalse(schema.getField("store").omitNorms());
- assertTrue(schema.getField("store_0_coordinate").omitNorms());
- assertTrue(schema.getField("store_1_coordinate").omitNorms());
- assertFalse(schema.getField("amount").omitNorms());
- assertTrue(schema.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_currency").omitNorms());
- assertTrue(schema.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_amount_raw").omitNorms());
-
- SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "store", "40.7143,-74.006", 3.0f );
- doc.addField( "amount", "10.5", 3.0f );
- Document out = DocumentBuilder.toDocument( doc, schema );
- assertNotNull( out.get( "store" ) );
- assertNotNull( out.get( "amount" ) );
- assertNotNull(out.getField("store_0_coordinate"));
- //NOTE: As the subtypes have omitNorm=true, they must have boost=1F, otherwise this is going to fail when adding the doc to Lucene.
- assertTrue(1f == out.getField("store_0_coordinate").boost());
- assertTrue(1f == out.getField("store_1_coordinate").boost());
- assertTrue(1f == out.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_currency").boost());
- assertTrue(1f == out.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_amount_raw").boost());
- }
-
- @Test
- public void testWithPolyFieldsAndDocumentBoost() {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- assertFalse(schema.getField("store").omitNorms());
- assertTrue(schema.getField("store_0_coordinate").omitNorms());
- assertTrue(schema.getField("store_1_coordinate").omitNorms());
- assertFalse(schema.getField("amount").omitNorms());
- assertTrue(schema.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_currency").omitNorms());
- assertTrue(schema.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_amount_raw").omitNorms());
-
- SolrInputDocument doc = new SolrInputDocument();
- doc.setDocumentBoost(3.0f);
- doc.addField( "store", "40.7143,-74.006");
- doc.addField( "amount", "10.5");
- Document out = DocumentBuilder.toDocument( doc, schema );
- assertNotNull( out.get( "store" ) );
- assertNotNull(out.getField("store_0_coordinate"));
- //NOTE: As the subtypes have omitNorm=true, they must have boost=1F, otherwise this is going to fail when adding the doc to Lucene.
- assertTrue(1f == out.getField("store_0_coordinate").boost());
- assertTrue(1f == out.getField("store_1_coordinate").boost());
- assertTrue(1f == out.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_currency").boost());
- assertTrue(1f == out.getField("amount" + FieldType.POLY_FIELD_SEPARATOR + "_amount_raw").boost());
- }
-
/**
- * It's ok to boost a field if it has norms
+ * Even though boosts have been removed, we still support them for bw compat.
*/
public void testBoost() throws Exception {
XmlDoc xml = new XmlDoc();
@@ -222,192 +127,6 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
assertNull(h.validateUpdate(add(xml, new String[0])));
}
- private void assertMultiValuedFieldAndDocBoosts(SolrInputField field) throws Exception {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- SolrInputDocument doc = new SolrInputDocument();
- doc.setDocumentBoost(3.0f);
- field.addValue( "summer time" , 1.0f );
- field.addValue( "in the city" , 5.0f ); // using boost
- field.addValue( "living is easy" , 1.0f );
- doc.put( field.getName(), field );
-
- Document out = DocumentBuilder.toDocument( doc, schema );
- IndexableField[] outF = out.getFields( field.getName() );
- assertEquals("wrong number of field values",
- 3, outF.length);
-
- // since Lucene no longer has native documnt boosts, we should find
- // the doc boost multiplied into the boost o nthe first field value
- // all other field values should be 1.0f
- // (lucene will multiply all of the field boosts later)
- assertEquals(15.0f, outF[0].boost(), 0.0f);
- assertEquals(1.0f, outF[1].boost(), 0.0f);
- assertEquals(1.0f, outF[2].boost(), 0.0f);
-
- }
-
- public void testMultiValuedFieldAndDocBoostsWithCopy() throws Exception {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- SolrInputField field = new SolrInputField( "foo_t" );
- List<CopyField> copyFields = schema.getCopyFieldsList(field.getName());
-
- assertNotNull( copyFields );
- assertFalse( copyFields.isEmpty() );
- assertMultiValuedFieldAndDocBoosts( field );
- }
-
- public void testMultiValuedFieldAndDocBoostsNoCopy() throws Exception {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- SolrInputField field = new SolrInputField( "t_foo" );
- List<CopyField> copyFields = schema.getCopyFieldsList(field.getName());
-
- assertTrue( copyFields == null || copyFields.isEmpty() );
- assertMultiValuedFieldAndDocBoosts( field );
- }
-
- public void testCopyFieldsAndFieldBoostsAndDocBoosts() throws Exception {
- SolrCore core = h.getCore();
- IndexSchema schema = core.getLatestSchema();
- SolrInputDocument doc = new SolrInputDocument();
-
- final float DOC_BOOST = 3.0F;
- doc.setDocumentBoost(DOC_BOOST);
- doc.addField("id", "42");
-
- SolrInputField inTitle = new SolrInputField( "title" );
- inTitle.addValue( "titleA" , 2.0F );
- inTitle.addValue( "titleB" , 7.0F );
- final float TITLE_BOOST = 2.0F * 7.0F;
- assertEquals(TITLE_BOOST, inTitle.getBoost(), 0.0F);
- doc.put( inTitle.getName(), inTitle );
-
- SolrInputField inFoo = new SolrInputField( "foo_t" );
- inFoo.addValue( "summer time" , 1.0F );
- inFoo.addValue( "in the city" , 5.0F );
- inFoo.addValue( "living is easy" , 11.0F );
- final float FOO_BOOST = 1.0F * 5.0F * 11.0F;
- assertEquals(FOO_BOOST, inFoo.getBoost(), 0.0F);
- doc.put( inFoo.getName(), inFoo );
-
- Document out = DocumentBuilder.toDocument( doc, schema );
-
- IndexableField[] outTitle = out.getFields( inTitle.getName() );
- assertEquals("wrong number of title values",
- 2, outTitle.length);
-
- IndexableField[] outNoNorms = out.getFields( "title_stringNoNorms" );
- assertEquals("wrong number of nonorms values",
- 2, outNoNorms.length);
-
- IndexableField[] outFoo = out.getFields( inFoo.getName() );
- assertEquals("wrong number of foo values",
- 3, outFoo.length);
-
- IndexableField[] outText = out.getFields( "text" );
- assertEquals("wrong number of text values",
- 5, outText.length);
-
- // since Lucene no longer has native document boosts, we should find
- // the doc boost multiplied into the boost on the first field value
- // of each field. All other field values should be 1.0f
- // (lucene will multiply all of the field value boosts later)
- assertEquals(TITLE_BOOST * DOC_BOOST, outTitle[0].boost(), 0.0F);
- assertEquals(1.0F, outTitle[1].boost(), 0.0F);
- //
- assertEquals(FOO_BOOST * DOC_BOOST, outFoo[0].boost(), 0.0F);
- assertEquals(1.0F, outFoo[1].boost(), 0.0F);
- assertEquals(1.0F, outFoo[2].boost(), 0.0F);
- //
- assertEquals(TITLE_BOOST * DOC_BOOST, outText[0].boost(), 0.0F);
- assertEquals(1.0F, outText[1].boost(), 0.0F);
- assertEquals(FOO_BOOST, outText[2].boost(), 0.0F);
- assertEquals(1.0F, outText[3].boost(), 0.0F);
- assertEquals(1.0F, outText[4].boost(), 0.0F);
-
- // copyField dest with no norms should not have received any boost
- assertEquals(1.0F, outNoNorms[0].boost(), 0.0F);
- assertEquals(1.0F, outNoNorms[1].boost(), 0.0F);
-
- // now index that SolrInputDocument to check the computed norms
-
- assertU(adoc(doc));
- assertU(commit());
-
- SolrQueryRequest req = req("q", "id:42");
- try {
- // very hack-ish
-
- SolrQueryResponse rsp = new SolrQueryResponse();
- core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
-
- DocList dl = ((ResultContext) rsp.getResponse()).getDocList();
- assertTrue("can't find the doc we just added", 1 == dl.size());
- int docid = dl.iterator().nextDoc();
-
- SolrIndexSearcher searcher = req.getSearcher();
- LeafReader reader = SlowCompositeReaderWrapper.wrap(searcher.getTopReaderContext().reader());
-
- assertTrue("similarity doesn't extend ClassicSimilarity, " +
- "config or defaults have changed since test was written",
- searcher.getSimilarity(true) instanceof ClassicSimilarity);
-
- ClassicSimilarity sim = (ClassicSimilarity) searcher.getSimilarity(true);
-
- NumericDocValues titleNorms = reader.getNormValues("title");
- NumericDocValues fooNorms = reader.getNormValues("foo_t");
- NumericDocValues textNorms = reader.getNormValues("text");
-
- assertEquals(docid, titleNorms.advance(docid));
- assertEquals(expectedNorm(sim, 2, TITLE_BOOST * DOC_BOOST),
- titleNorms.longValue());
-
- assertEquals(docid, fooNorms.advance(docid));
- assertEquals(expectedNorm(sim, 8-3, FOO_BOOST * DOC_BOOST),
- fooNorms.longValue());
-
- assertEquals(docid, textNorms.advance(docid));
- assertEquals(expectedNorm(sim, 2 + 8-3,
- TITLE_BOOST * FOO_BOOST * DOC_BOOST),
- textNorms.longValue());
-
- } finally {
- req.close();
- }
- }
-
- /**
- * Given a length, and boost returns the expected encoded norm
- */
- private static byte expectedNorm(final ClassicSimilarity sim,
- final int length, final float boost) {
- return (byte) sim.encodeNormValue(boost / ((float) Math.sqrt(length)));
- }
-
-
- public void testBoostOmitNorms() throws Exception {
- XmlDoc xml = new XmlDoc();
- // explicitly boosting a field if that omits norms is not ok
- xml.xml = "<doc>"
- + "<field name=\"id\">ignore_exception</field>"
- + "<field name=\"title_stringNoNorms\" boost=\"3.0\">mytitle</field>"
- + "</doc>";
- try {
- assertNull(h.validateUpdate(add(xml, new String[0])));
- fail("didn't get expected exception for boosting omit norms field");
- } catch (SolrException expected) {
- // expected exception
- }
- // boosting a field that is copied to another field that omits norms is ok
- xml.xml = "<doc>"
- + "<field name=\"id\">42</field>"
- + "<field name=\"title\" boost=\"3.0\">mytitle</field>"
- + "</doc>";
- assertNull(h.validateUpdate(add(xml, new String[0])));
- }
-
/**
* It's ok to supply a document boost even if a field omits norms
*/
@@ -467,15 +186,6 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
assertTrue(compareSolrInputDocument(doc1, doc2));
- doc1.setDocumentBoost(1.1f);
- assertFalse(compareSolrInputDocument(doc1, doc2));
-
- doc2.setDocumentBoost(1.1f);
- assertTrue(compareSolrInputDocument(doc1, doc2));
-
- doc2.setDocumentBoost(20f);
- assertFalse(compareSolrInputDocument(doc1, doc2));
-
doc1 = new SolrInputDocument();
doc1.addField("foo", randomString);
@@ -503,19 +213,12 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
int val = random().nextInt();
SolrInputField sif1 = new SolrInputField(randomString);
- sif1.setValue(val, 1.0f);
+ sif1.setValue(val);
SolrInputField sif2 = new SolrInputField(randomString);
- sif2.setValue(val, 1.0f);
-
- assertTrue(assertSolrInputFieldEquals(sif1, sif2));
+ sif2.setValue(val);
- sif1.setBoost(2.1f);
- sif2.setBoost(2.1f);
assertTrue(assertSolrInputFieldEquals(sif1, sif2));
- sif2.setBoost(2.0f);
- assertFalse(assertSolrInputFieldEquals(sif1, sif2));
-
sif2.setName("foo");
assertFalse(assertSolrInputFieldEquals(sif1, sif2));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactoryTest.java
index c01c319..051cad1 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactoryTest.java
@@ -177,7 +177,7 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
// append to existing values, preserve boost
d = processAdd(chain,
doc(f("id", "1111"),
- field("dest_s", 2.3f, "orig1", "orig2"),
+ field("dest_s", "orig1", "orig2"),
f("source0_s", "NOT COPIED"),
f("source1_s", "123456789", "", 42, "abcd")));
assertNotNull(chain, d);
@@ -187,8 +187,6 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
assertEquals(chain,
Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"),
d.getFieldValues("dest_s"));
- assertEquals(chain + ": dest boost changed",
- 2.3f, d.getField("dest_s").getBoost(), 0.0f);
}
// should be equivalent for any chain matching source1_s and source2_s (but not source0_s)
@@ -213,10 +211,10 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
Arrays.asList("123456789", "", 42, "abcd", "xxx", 999),
d.getFieldValues("dest_s"));
- // append to existing values, preserve boost
+ // append to existing values
d = processAdd(chain,
doc(f("id", "1111"),
- field("dest_s", 2.3f, "orig1", "orig2"),
+ field("dest_s", "orig1", "orig2"),
f("source0_s", "NOT COPIED"),
f("source1_s", "123456789", "", 42, "abcd"),
f("source2_s", "xxx", 999)));
@@ -232,8 +230,6 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
"123456789", "", 42, "abcd",
"xxx", 999),
d.getFieldValues("dest_s"));
- assertEquals(chain + ": dest boost changed",
- 2.3f, d.getField("dest_s").getBoost(), 0.0f);
}
// any chain that copies source1_s to dest_s should be equivalent for these assertions
@@ -258,7 +254,7 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
// append to existing values, preserve boost
d = processAdd(chain,
doc(f("id", "1111"),
- field("dest_s", 2.3f, "orig1", "orig2"),
+ field("dest_s", "orig1", "orig2"),
f("source1_s", "123456789", "", 42, "abcd")));
assertNotNull(chain, d);
assertEquals(chain,
@@ -267,8 +263,6 @@ public class CloneFieldUpdateProcessorFactoryTest extends UpdateProcessorTestBas
assertEquals(chain,
Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"),
d.getFieldValues("dest_s"));
- assertEquals(chain + ": dest boost changed",
- 2.3f, d.getField("dest_s").getBoost(), 0.0f);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java
index c90a727..e328267 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java
@@ -104,12 +104,11 @@ public class DefaultValueUpdateProcessorTest extends SolrTestCaseJ4 {
/**
* Convenience method for building up SolrInputFields
*/
- SolrInputField field(String name, float boost, Object... values) {
+ SolrInputField field(String name, Object... values) {
SolrInputField f = new SolrInputField(name);
for (Object v : values) {
- f.addValue(v, 1.0F);
+ f.addValue(v);
}
- f.setBoost(boost);
return f;
}
@@ -117,7 +116,7 @@ public class DefaultValueUpdateProcessorTest extends SolrTestCaseJ4 {
* Convenience method for building up SolrInputFields with default boost
*/
SolrInputField f(String name, Object... values) {
- return field(name, 1.0F, values);
+ return field(name, values);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
index ecfe772..55fa5bd 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
@@ -75,7 +75,7 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
f("name", " Hoss ", new StringBuilder(" Man")),
f("foo_t", " some text ", "other Text\t"),
f("foo_d", new Integer(42)),
- field("foo_s", 5.0F, " string ")));
+ field("foo_s", " string ")));
assertNotNull(d);
@@ -89,8 +89,6 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
// slightly more interesting
assertEquals("processor borked non string value",
new Integer(42), d.getFieldValue("foo_d"));
- assertEquals("wrong boost",
- 5.0F, d.getField("foo_s").getBoost(), 0.0F);
}
public void testUniqValues() throws Exception {
@@ -448,7 +446,7 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
special = new SolrInputField("foo_s");
special.setValue(new TreeSet<>
- (Arrays.asList("ggg", "first", "last", "hhh")), 1.2F);
+ (Arrays.asList("ggg", "first", "last", "hhh")));
d = processAdd("last-value",
doc(f("id", "1111"),
@@ -461,7 +459,7 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
// test something that's definitely a List
special = new SolrInputField("foo_s");
- special.setValue(Arrays.asList("first", "ggg", "hhh", "last"), 1.2F);
+ special.setValue(Arrays.asList("first", "ggg", "hhh", "last"));
d = processAdd("last-value",
doc(f("id", "1111"),
@@ -476,7 +474,7 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
special = new SolrInputField("foo_s");
special.setValue(new LinkedHashSet<>
- (Arrays.asList("first", "ggg", "hhh", "last")), 1.2F);
+ (Arrays.asList("first", "ggg", "hhh", "last")));
d = processAdd("last-value",
doc(f("id", "1111"),
@@ -819,7 +817,7 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
doc(f("id", "1111"),
f("foo_t", "string1", "string2"),
f("foo_d", new Integer(42)),
- field("foo_s", 3.0F, "string3", "string4")));
+ field("foo_s", "string3", "string4")));
assertNotNull(d);
@@ -830,8 +828,6 @@ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase {
// slightly more interesting
assertEquals("processor borked non string value",
new Integer(42), d.getFieldValue("foo_d"));
- assertEquals("wrong boost",
- 3.0F, d.getField("foo_s").getBoost(), 0.0F);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index f4ab1fa..6e7584f 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -81,9 +81,9 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
badIds = new String[10];
for(int i = 0; i < 10;i++) {
// a valid document
- docs.add(doc(field("id", 1f, String.valueOf(2*i)), field("weight", 1f, i)));
+ docs.add(doc(field("id", String.valueOf(2*i)), field("weight", i)));
// ... and an invalid one
- docs.add(doc(field("id", 1f, String.valueOf(2*i+1)), field("weight", 1f, "b")));
+ docs.add(doc(field("id", String.valueOf(2*i+1)), field("weight", "b")));
badIds[i] = String.valueOf(2*i+1);
}
}
@@ -119,10 +119,10 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
@Test
public void testValidAdds() throws IOException {
- SolrInputDocument validDoc = doc(field("id", 1f, "1"), field("text", 1f, "the quick brown fox"));
+ SolrInputDocument validDoc = doc(field("id", "1"), field("text", "the quick brown fox"));
add("tolerant-chain-max-errors-10", null, validDoc);
- validDoc = doc(field("id", 1f, "2"), field("text", 1f, "the quick brown fox"));
+ validDoc = doc(field("id", "2"), field("text", "the quick brown fox"));
add("tolerant-chain-max-errors-not-set", null, validDoc);
assertU(commit());
@@ -136,7 +136,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
@Test
public void testInvalidAdds() throws IOException {
- SolrInputDocument invalidDoc = doc(field("text", 1f, "the quick brown fox")); //no id
+ SolrInputDocument invalidDoc = doc(field("text", "the quick brown fox")); //no id
try {
// This doc should fail without being tolerant
add("not-tolerant", null, invalidDoc);
@@ -148,7 +148,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc}), null, "(unknown)");
//a valid doc
- SolrInputDocument validDoc = doc(field("id", 1f, "1"), field("text", 1f, "the quick brown fox"));
+ SolrInputDocument validDoc = doc(field("id", "1"), field("text", "the quick brown fox"));
try {
// This batch should fail without being tolerant
@@ -171,8 +171,8 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
assertQ(req("q","id:1")
,"//result[@numFound='1']");
- invalidDoc = doc(field("id", 1f, "2"), field("weight", 1f, "aaa"));
- validDoc = doc(field("id", 1f, "3"), field("weight", 1f, "3"));
+ invalidDoc = doc(field("id", "2"), field("weight", "aaa"));
+ validDoc = doc(field("id", "3"), field("weight", "3"));
try {
// This batch should fail without being tolerant
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java
index bca2d30..2b10953 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java
@@ -106,9 +106,8 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 {
SolrInputField field(String name, float boost, Object... values) {
SolrInputField f = new SolrInputField(name);
for (Object v : values) {
- f.addValue(v, 1.0F);
+ f.addValue(v);
}
- f.setBoost(boost);
return f;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/core/src/test/org/apache/solr/update/processor/UpdateProcessorTestBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/UpdateProcessorTestBase.java b/solr/core/src/test/org/apache/solr/update/processor/UpdateProcessorTestBase.java
index e069ee2..d3aa979 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/UpdateProcessorTestBase.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/UpdateProcessorTestBase.java
@@ -151,12 +151,11 @@ public class UpdateProcessorTestBase extends SolrTestCaseJ4 {
/**
* Convenience method for building up SolrInputFields
*/
- final SolrInputField field(String name, float boost, Object... values) {
+ final SolrInputField field(String name, Object... values) {
SolrInputField f = new SolrInputField(name);
for (Object v : values) {
- f.addValue(v, 1.0F);
+ f.addValue(v);
}
- f.setBoost(boost);
return f;
}
@@ -164,6 +163,6 @@ public class UpdateProcessorTestBase extends SolrTestCaseJ4 {
* Convenience method for building up SolrInputFields with default boost
*/
final SolrInputField f(String name, Object... values) {
- return field(name, 1.0F, values);
+ return field(name, values);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
index 9550a41..e2e65bb 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
@@ -86,13 +86,13 @@ public class DocumentObjectBinder {
Map<String, Object> mapValue = (Map<String, Object>) field.get(obj);
for (Map.Entry<String, Object> e : mapValue.entrySet()) {
- doc.setField(e.getKey(), e.getValue(), 1.0f);
+ doc.setField(e.getKey(), e.getValue());
}
} else {
if (field.child != null) {
addChild(obj, field, doc);
} else {
- doc.setField(field.name, field.get(obj), 1.0f);
+ doc.setField(field.name, field.get(obj));
}
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
index 47521de..7776a13 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
@@ -19,12 +19,14 @@ package org.apache.solr.client.solrj.request;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
@@ -32,6 +34,8 @@ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.DataInputInputStream;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides methods for marshalling an UpdateRequest to a NamedList which can be serialized in the javabin format and
@@ -43,6 +47,9 @@ import org.apache.solr.common.util.NamedList;
*/
public class JavaBinUpdateRequestCodec {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
+
/**
* Converts an UpdateRequest to a NamedList which can be serialized to the given OutputStream in the javabin format
*
@@ -243,11 +250,27 @@ public class JavaBinUpdateRequestCodec {
for (int i = 0; i < namedList.size(); i++) {
NamedList nl = namedList.get(i);
if (i == 0) {
- doc.setDocumentBoost(nl.getVal(0) == null ? 1.0f : (Float) nl.getVal(0));
+ Float boost = (Float) nl.getVal(0);
+ if (boost != null && boost.floatValue() != 1f) {
+ String message = "Ignoring document boost: " + boost + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
+ }
} else {
+ Float boost = (Float) nl.getVal(2);
+ if (boost != null && boost.floatValue() != 1f) {
+ String message = "Ignoring field boost: " + boost + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
+ }
doc.addField((String) nl.getVal(0),
- nl.getVal(1),
- nl.getVal(2) == null ? 1.0f : (Float) nl.getVal(2));
+ nl.getVal(1));
}
}
return doc;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
index beed40e..c5595b1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
@@ -64,10 +64,9 @@ public class ClientUtils
public static void writeXML( SolrInputDocument doc, Writer writer ) throws IOException
{
- writer.write("<doc boost=\""+doc.getDocumentBoost()+"\">");
+ writer.write("<doc>");
for( SolrInputField field : doc ) {
- float boost = field.getBoost();
String name = field.getName();
for( Object v : field ) {
@@ -81,19 +80,14 @@ public class ClientUtils
if (v instanceof Collection) {
Collection values = (Collection) v;
for (Object value : values) {
- writeVal(writer, boost, name, value, update);
- boost = 1.0f;
+ writeVal(writer, name, value, update);
}
} else {
- writeVal(writer, boost, name, v, update);
- boost = 1.0f;
+ writeVal(writer, name, v, update);
}
}
} else {
- writeVal(writer, boost, name, v, update);
- // only write the boost for the first multi-valued field
- // otherwise, the used boost is the product of all the boost values
- boost = 1.0f;
+ writeVal(writer, name, v, update);
}
}
}
@@ -107,7 +101,7 @@ public class ClientUtils
writer.write("</doc>");
}
- private static void writeVal(Writer writer, float boost, String name, Object v, String update) throws IOException {
+ private static void writeVal(Writer writer, String name, Object v, String update) throws IOException {
if (v instanceof Date) {
v = ((Date)v).toInstant().toString();
} else if (v instanceof byte[]) {
@@ -119,20 +113,14 @@ public class ClientUtils
}
if (update == null) {
- if( boost != 1.0f ) {
- XML.writeXML(writer, "field", v.toString(), "name", name, "boost", boost);
- } else if (v != null) {
+ if (v != null) {
XML.writeXML(writer, "field", v.toString(), "name", name );
}
} else {
- if( boost != 1.0f ) {
- XML.writeXML(writer, "field", v.toString(), "name", name, "boost", boost, "update", update);
- } else {
- if (v == null) {
- XML.writeXML(writer, "field", null, "name", name, "update", update, "null", true);
- } else {
- XML.writeXML(writer, "field", v.toString(), "name", name, "update", update);
- }
+ if (v == null) {
+ XML.writeXML(writer, "field", null, "name", name, "update", update, "null", true);
+ } else {
+ XML.writeXML(writer, "field", v.toString(), "name", name, "update", update);
}
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/common/SolrInputDocument.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrInputDocument.java b/solr/solrj/src/java/org/apache/solr/common/SolrInputDocument.java
index 3d3c060..c8451aa 100644
--- a/solr/solrj/src/java/org/apache/solr/common/SolrInputDocument.java
+++ b/solr/solrj/src/java/org/apache/solr/common/SolrInputDocument.java
@@ -35,7 +35,6 @@ import java.util.Set;
public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInputDocument> implements Iterable<SolrInputField>
{
private final Map<String,SolrInputField> _fields;
- private float _documentBoost = 1.0f;
private List<SolrInputDocument> _childDocuments;
public SolrInputDocument(String... fields) {
@@ -74,11 +73,16 @@ public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInpu
*
* @param name Name of the field, should match one of the field names defined under "fields" tag in schema.xml.
* @param value Value of the field, should be of same class type as defined by "type" attribute of the corresponding field in schema.xml.
- * @see #addField(String, Object, float)
*/
public void addField(String name, Object value)
{
- addField(name, value, 1.0f );
+ SolrInputField field = _fields.get( name );
+ if( field == null || field.value == null ) {
+ setField(name, value);
+ }
+ else {
+ field.addValue( value );
+ }
}
/** Get the first value for a field.
@@ -122,44 +126,14 @@ public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInpu
/** Set a field with implied null value for boost.
*
- * @see #setField(String, Object, float)
* @param name name of the field to set
* @param value value of the field
*/
- public void setField(String name, Object value)
- {
- setField(name, value, 1.0f );
- }
-
- public void setField(String name, Object value, float boost )
+ public void setField(String name, Object value )
{
SolrInputField field = new SolrInputField( name );
_fields.put( name, field );
- field.setValue( value, boost );
- }
-
- /**
- * Adds a field with the given name, value and boost. If a field with the
- * name already exists, then the given value is appended to the value of that
- * field, with the new boost. If the value is a collection, then each of its
- * values will be added to the field.
- *
- * The class type of value and the name parameter should match schema.xml.
- * schema.xml can be found in conf directory under the solr home by default.
- *
- * @param name Name of the field, should match one of the field names defined under "fields" tag in schema.xml.
- * @param value Value of the field, should be of same class type as defined by "type" attribute of the corresponding field in schema.xml.
- * @param boost Boost value for the field
- */
- public void addField(String name, Object value, float boost )
- {
- SolrInputField field = _fields.get( name );
- if( field == null || field.value == null ) {
- setField(name, value, boost);
- }
- else {
- field.addValue( value, boost );
- }
+ field.setValue( value );
}
/**
@@ -187,14 +161,6 @@ public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInpu
return _fields.values().iterator();
}
- public float getDocumentBoost() {
- return _documentBoost;
- }
-
- public void setDocumentBoost(float documentBoost) {
- _documentBoost = documentBoost;
- }
-
@Override
public String toString()
{
@@ -209,7 +175,6 @@ public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInpu
for (Map.Entry<String,SolrInputField> fieldEntry : entries) {
clone._fields.put(fieldEntry.getKey(), fieldEntry.getValue().deepCopy());
}
- clone._documentBoost = _documentBoost;
if (_childDocuments != null) {
clone._childDocuments = new ArrayList<>(_childDocuments.size());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java b/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java
index 261b326..94e98de 100644
--- a/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java
+++ b/solr/solrj/src/java/org/apache/solr/common/SolrInputField.java
@@ -29,7 +29,6 @@ public class SolrInputField implements Iterable<Object>, Serializable
{
String name;
Object value = null;
- float boost = 1.0f;
public SolrInputField( String n )
{
@@ -44,9 +43,7 @@ public class SolrInputField implements Iterable<Object>, Serializable
* a collection is given, then that collection will be used as the backing
* collection for the values.
*/
- public void setValue(Object v, float b) {
- boost = b;
-
+ public void setValue(Object v) {
if( v instanceof Object[] ) {
Object[] arr = (Object[])v;
Collection<Object> c = new ArrayList<>( arr.length );
@@ -65,27 +62,21 @@ public class SolrInputField implements Iterable<Object>, Serializable
* will be added individually.
*/
@SuppressWarnings("unchecked")
- public void addValue(Object v, float b) {
+ public void addValue(Object v) {
if( value == null ) {
if ( v instanceof Collection ) {
Collection<Object> c = new ArrayList<>( 3 );
for ( Object o : (Collection<Object>)v ) {
c.add( o );
}
- setValue(c, b);
+ setValue(c);
} else {
- setValue(v, b);
+ setValue(v);
}
return;
}
- // The lucene API and solr XML field specification make it possible to set boosts
- // on multi-value fields even though lucene indexing does not support this.
- // To keep behavior consistent with what happens in the lucene index, we accumulate
- // the product of all boosts specified for this field.
- boost *= b;
-
Collection<Object> vals = null;
if( value instanceof Collection ) {
vals = (Collection<Object>)value;
@@ -164,14 +155,6 @@ public class SolrInputField implements Iterable<Object>, Serializable
//---------------------------------------------------------------
//---------------------------------------------------------------
-
- public float getBoost() {
- return boost;
- }
-
- public void setBoost(float boost) {
- this.boost = boost;
- }
public String getName() {
return name;
@@ -211,12 +194,11 @@ public class SolrInputField implements Iterable<Object>, Serializable
@Override
public String toString()
{
- return name + ((boost == 1.0) ? "=" : ("("+boost+")=")) + value;
+ return name + "=" + value;
}
public SolrInputField deepCopy() {
SolrInputField clone = new SolrInputField(name);
- clone.boost = boost;
// We can't clone here, so we rely on simple primitives
if (value instanceof Collection) {
Collection<Object> values = (Collection<Object>) value;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
index 7e0cac6..def3571 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
@@ -19,6 +19,7 @@ package org.apache.solr.common.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.util.ArrayList;
@@ -31,6 +32,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.solr.common.EnumFieldValue;
import org.apache.solr.common.IteratorWriter;
@@ -43,6 +45,8 @@ import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.noggit.CharArr;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Defines a space-efficient serialization/deserialization format for transferring data.
@@ -61,6 +65,9 @@ import org.noggit.CharArr;
*/
public class JavaBinCodec implements PushWriter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
+
public static final byte
NULL = 0,
BOOL_TRUE = 1,
@@ -555,14 +562,28 @@ public class JavaBinCodec implements PushWriter {
public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) throws IOException {
int sz = readVInt(dis);
float docBoost = (Float)readVal(dis);
+ if (docBoost != 1f) {
+ String message = "Ignoring document boost: " + docBoost + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
+ }
SolrInputDocument sdoc = new SolrInputDocument(new LinkedHashMap<>(sz));
- sdoc.setDocumentBoost(docBoost);
for (int i = 0; i < sz; i++) {
- float boost = 1.0f;
String fieldName;
Object obj = readVal(dis); // could be a boost, a field name, or a child document
if (obj instanceof Float) {
- boost = (Float)obj;
+ float boost = (Float)obj;
+ if (boost != 1f) {
+ String message = "Ignoring field boost: " + boost + " as index-time boosts are not supported anymore";
+ if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
+ log.warn(message);
+ } else {
+ log.debug(message);
+ }
+ }
fieldName = (String)readVal(dis);
} else if (obj instanceof SolrInputDocument) {
sdoc.addChildDocument((SolrInputDocument)obj);
@@ -571,7 +592,7 @@ public class JavaBinCodec implements PushWriter {
fieldName = (String)obj;
}
Object fieldVal = readVal(dis);
- sdoc.setField(fieldName, fieldVal, boost);
+ sdoc.setField(fieldName, fieldVal);
}
return sdoc;
}
@@ -580,11 +601,8 @@ public class JavaBinCodec implements PushWriter {
List<SolrInputDocument> children = sdoc.getChildDocuments();
int sz = sdoc.size() + (children==null ? 0 : children.size());
writeTag(SOLRINPUTDOC, sz);
- writeFloat(sdoc.getDocumentBoost());
+ writeFloat(1f); // document boost
for (SolrInputField inputField : sdoc.values()) {
- if (inputField.getBoost() != 1.0f) {
- writeFloat(inputField.getBoost());
- }
writeExternString(inputField.getName());
writeVal(inputField.getValue());
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index d25280d..b1e7285 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -100,8 +100,8 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add something...
SolrInputDocument doc = new SolrInputDocument();
String docID = "1112211111";
- doc.addField( "id", docID, 1.0f );
- doc.addField( "name", "my name!", 1.0f );
+ doc.addField( "id", docID );
+ doc.addField( "name", "my name!" );
Assert.assertEquals( null, doc.getField("foo") );
Assert.assertTrue(doc.getField("name").getValue() != null );
@@ -127,28 +127,28 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add a few docs for facet testing...
List<SolrInputDocument> docs = new ArrayList<>();
SolrInputDocument doc2 = new SolrInputDocument();
- doc2.addField( "id", "2", 1.0f );
- doc2.addField( "inStock", true, 1.0f );
- doc2.addField( "price", 2, 1.0f );
- doc2.addField( "timestamp_dt", new java.util.Date(), 1.0f );
+ doc2.addField( "id", "2" );
+ doc2.addField( "inStock", true );
+ doc2.addField( "price", 2 );
+ doc2.addField( "timestamp_dt", new java.util.Date() );
docs.add(doc2);
SolrInputDocument doc3 = new SolrInputDocument();
- doc3.addField( "id", "3", 1.0f );
- doc3.addField( "inStock", false, 1.0f );
- doc3.addField( "price", 3, 1.0f );
- doc3.addField( "timestamp_dt", new java.util.Date(), 1.0f );
+ doc3.addField( "id", "3" );
+ doc3.addField( "inStock", false );
+ doc3.addField( "price", 3 );
+ doc3.addField( "timestamp_dt", new java.util.Date() );
docs.add(doc3);
SolrInputDocument doc4 = new SolrInputDocument();
- doc4.addField( "id", "4", 1.0f );
- doc4.addField( "inStock", true, 1.0f );
- doc4.addField( "price", 4, 1.0f );
- doc4.addField( "timestamp_dt", new java.util.Date(), 1.0f );
+ doc4.addField( "id", "4" );
+ doc4.addField( "inStock", true );
+ doc4.addField( "price", 4 );
+ doc4.addField( "timestamp_dt", new java.util.Date() );
docs.add(doc4);
SolrInputDocument doc5 = new SolrInputDocument();
- doc5.addField( "id", "5", 1.0f );
- doc5.addField( "inStock", false, 1.0f );
- doc5.addField( "price", 5, 1.0f );
- doc5.addField( "timestamp_dt", new java.util.Date(), 1.0f );
+ doc5.addField( "id", "5" );
+ doc5.addField( "inStock", false );
+ doc5.addField( "price", 5 );
+ doc5.addField( "timestamp_dt", new java.util.Date() );
docs.add(doc5);
upres = client.add( docs );
@@ -247,13 +247,13 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add something...
SolrInputDocument doc1 = new SolrInputDocument();
- doc1.addField( "id", "id1", 1.0f );
- doc1.addField( "name", "doc1", 1.0f );
+ doc1.addField( "id", "id1" );
+ doc1.addField( "name", "doc1" );
doc1.addField( "price", 10 );
SolrInputDocument doc2 = new SolrInputDocument();
- doc2.addField( "id", "id2", 1.0f );
- doc2.addField( "name", "h\uD866\uDF05llo", 1.0f );
+ doc2.addField( "id", "id2" );
+ doc2.addField( "name", "h\uD866\uDF05llo" );
doc2.addField( "price", 20 );
Collection<SolrInputDocument> docs = new ArrayList<>();
@@ -302,13 +302,13 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Add two docs
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "id", "id1", 1.0f );
- doc.addField( "name", "doc1", 1.0f );
+ doc.addField( "id", "id1" );
+ doc.addField( "name", "doc1" );
doc.addField( "price", 10 );
client.add(doc);
doc = new SolrInputDocument();
- doc.addField( "id", "id2", 1.0f );
+ doc.addField( "id", "id2" );
client.add(doc);
client.commit();
@@ -443,9 +443,9 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
}
SolrInputDocument doc = new SolrInputDocument();
- doc.addField("id", "DOCID", 1.0f);
- doc.addField("id", "DOCID2", 1.0f);
- doc.addField("name", "hello", 1.0f);
+ doc.addField("id", "DOCID");
+ doc.addField("id", "DOCID2");
+ doc.addField("name", "hello");
if (client instanceof HttpSolrClient) {
try {
@@ -484,15 +484,15 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add something...
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "id", "111", 1.0f );
- doc.addField( "name", "doc1", 1.0f );
+ doc.addField( "id", "111" );
+ doc.addField( "name", "doc1" );
doc.addField( "price", 11 );
client.add(doc);
client.commit(); // make sure this gets in first
doc = new SolrInputDocument();
- doc.addField( "id", "222", 1.0f );
- doc.addField( "name", "doc2", 1.0f );
+ doc.addField( "id", "222" );
+ doc.addField( "name", "doc2" );
doc.addField( "price", 22 );
client.add(doc);
client.commit();
@@ -539,8 +539,8 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add something...
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "id", "111", 1.0f );
- doc.addField( "name", "doc1", 1.0f );
+ doc.addField( "id", "111" );
+ doc.addField( "name", "doc1" );
doc.addField( "json_s", rawJson );
doc.addField( "xml_s", rawXml );
client.add(doc);
@@ -689,7 +689,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
SolrInputDocument[] doc = new SolrInputDocument[5];
for( int i=0; i<doc.length; i++ ) {
doc[i] = new SolrInputDocument();
- doc[i].setField( "id", "ID"+i, 1.0f );
+ doc[i].setField( "id", "ID"+i );
client.add(doc[i]);
}
client.commit();
@@ -844,7 +844,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
ArrayList<SolrInputDocument> docs = new ArrayList<>(10);
for( int i=1; i<=10; i++ ) {
SolrInputDocument doc = new SolrInputDocument();
- doc.setField( "id", i+"", 1.0f );
+ doc.setField( "id", i+"" );
if( (i%2)==0 ) {
doc.addField( "features", "two" );
}
@@ -1576,8 +1576,8 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
// Now add something...
SolrInputDocument doc = new SolrInputDocument();
- doc.addField( "id", "DOCID", 1.0f );
- doc.addField( "name", "hello", 1.0f );
+ doc.addField( "id", "DOCID" );
+ doc.addField( "name", "hello" );
client.add(doc);
client.commit(); // Since the transaction log is disabled in the example, we need to commit
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
index dc964cb..cda9961 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
@@ -50,8 +50,8 @@ abstract public class SolrExampleTestsBase extends SolrJettyTestBase {
// Now try a timed commit...
SolrInputDocument doc3 = new SolrInputDocument();
- doc3.addField("id", "id3", 1.0f);
- doc3.addField("name", "doc3", 1.0f);
+ doc3.addField("id", "id3");
+ doc3.addField("name", "doc3");
doc3.addField("price", 10);
UpdateRequest up = new UpdateRequest();
up.add(doc3);
@@ -87,8 +87,8 @@ abstract public class SolrExampleTestsBase extends SolrJettyTestBase {
// Now test the new convenience parameter on the add() for commitWithin
SolrInputDocument doc4 = new SolrInputDocument();
- doc4.addField("id", "id4", 1.0f);
- doc4.addField("name", "doc4", 1.0f);
+ doc4.addField("id", "id4");
+ doc4.addField("name", "doc4");
doc4.addField("price", 10);
client.add(doc4, 500);
@@ -125,8 +125,8 @@ abstract public class SolrExampleTestsBase extends SolrJettyTestBase {
// Now add one document...
SolrInputDocument doc3 = new SolrInputDocument();
- doc3.addField("id", "id3", 1.0f);
- doc3.addField("name", "doc3", 1.0f);
+ doc3.addField("id", "id3");
+ doc3.addField("name", "doc3");
doc3.addField("price", 10);
client.add(doc3);
client.commit();
@@ -169,7 +169,7 @@ abstract public class SolrExampleTestsBase extends SolrJettyTestBase {
SolrInputDocument[] doc = new SolrInputDocument[3];
for (int i = 0; i < 3; i++) {
doc[i] = new SolrInputDocument();
- doc[i].setField("id", i + " & 222", 1.0f);
+ doc[i].setField("id", i + " & 222");
}
String id = (String) doc[0].getField("id").getFirstValue();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java
index 53ad2eb..df7beea 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java
@@ -53,20 +53,19 @@ public class TestUpdateRequestCodec extends LuceneTestCase {
updateRequest.setParam("a", "b");
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", 1);
- doc.addField("desc", "one", 2.0f);
+ doc.addField("desc", "one");
doc.addField("desc", "1");
updateRequest.add(doc);
doc = new SolrInputDocument();
doc.addField("id", 2);
- doc.setDocumentBoost(10.0f);
- doc.addField("desc", "two", 3.0f);
+ doc.addField("desc", "two");
doc.addField("desc", "2");
updateRequest.add(doc);
doc = new SolrInputDocument();
doc.addField("id", 3);
- doc.addField("desc", "three", 3.0f);
+ doc.addField("desc", "three");
doc.addField("desc", "3");
updateRequest.add(doc);
@@ -121,7 +120,7 @@ public class TestUpdateRequestCodec extends LuceneTestCase {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", 1);
- doc.addField("desc", "one", 2.0f);
+ doc.addField("desc", "one");
// imagine someone adding a custom Bean that implements Iterable
// but is not a Collection
doc.addField("iter", new Iterable<String>() {
@@ -171,20 +170,19 @@ public class TestUpdateRequestCodec extends LuceneTestCase {
updateRequest.setParam("a", "b");
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", 1);
- doc.addField("desc", "one", 2.0f);
+ doc.addField("desc", "one");
doc.addField("desc", "1");
updateRequest.add(doc);
doc = new SolrInputDocument();
doc.addField("id", 2);
- doc.setDocumentBoost(10.0f);
- doc.addField("desc", "two", 3.0f);
+ doc.addField("desc", "two");
doc.addField("desc", "2");
updateRequest.add(doc);
doc = new SolrInputDocument();
doc.addField("id", 3);
- doc.addField("desc", "three", 3.0f);
+ doc.addField("desc", "three");
doc.addField("desc", "3");
updateRequest.add(doc);
@@ -234,14 +232,10 @@ public class TestUpdateRequestCodec extends LuceneTestCase {
private void compareDocs(String m,
SolrInputDocument expectedDoc,
SolrInputDocument actualDoc) {
- Assert.assertEquals(expectedDoc.getDocumentBoost(),
- actualDoc.getDocumentBoost());
for (String s : expectedDoc.getFieldNames()) {
SolrInputField expectedField = expectedDoc.getField(s);
SolrInputField actualField = actualDoc.getField(s);
- Assert.assertEquals(m + ": diff boosts for field: " + s,
- expectedField.getBoost(), actualField.getBoost());
Object expectedVal = expectedField.getValue();
Object actualVal = actualField.getValue();
if (expectedVal instanceof Set &&
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java b/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java
index 6b3318d..0234ce0 100644
--- a/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/SolrDocumentTest.java
@@ -191,9 +191,9 @@ public class SolrDocumentTest extends LuceneTestCase
// Set up a simple document
SolrInputDocument doc = new SolrInputDocument();
for( int i=0; i<5; i++ ) {
- doc.addField( "f", fval0, 1.0f );
- doc.addField( "f", fval1, 1.0f );
- doc.addField( "f", fval2, 1.0f );
+ doc.addField( "f", fval0 );
+ doc.addField( "f", fval1 );
+ doc.addField( "f", fval2 );
}
assertEquals( (3*5), doc.getField("f").getValueCount() );
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ed2b764/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index e5bd384..c3c269c 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -2161,9 +2161,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
SolrInputDocument sdoc1 = (SolrInputDocument) expected;
SolrInputDocument sdoc2 = (SolrInputDocument) actual;
- if (Float.compare(sdoc1.getDocumentBoost(), sdoc2.getDocumentBoost()) != 0) {
- return false;
- }
if(sdoc1.getFieldNames().size() != sdoc2.getFieldNames().size()) {
return false;
@@ -2222,10 +2219,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
return false;
}
- if (Float.compare(sif1.getBoost(), sif2.getBoost()) != 0) {
- return false;
- }
-
return true;
}
[10/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10156: Update
CHANGES.txt
Posted by is...@apache.org.
SOLR-10156: Update CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/efbabf8e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/efbabf8e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/efbabf8e
Branch: refs/heads/jira/solr-6736
Commit: efbabf8e2cf770c3dd19fb3faf2e34b8885381ea
Parents: 4990eed
Author: Joel Bernstein <jb...@apache.org>
Authored: Fri Mar 3 15:07:37 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Fri Mar 3 15:07:37 2017 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/efbabf8e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2659155..7ea6226 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -145,6 +145,8 @@ New Features
* SOLR-10153 & SOLR-10152: The Unified and Postings based highlighters: Add hl.bs.type=SEPARATOR along with new param
hl.bs.separator to break passages by a provided single character. (Amrit Sarkar, David Smiley)
+* SOLR-10156: Add significantTerms Streaming Expression (Joel Bernstein)
+
Bug Fixes
----------------------
[12/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-8593: Update
CHANGES.txt
Posted by is...@apache.org.
SOLR-8593: Update CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5ae51d4d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5ae51d4d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5ae51d4d
Branch: refs/heads/jira/solr-6736
Commit: 5ae51d4ddf5e1a27b8f1741910a32697f952f482
Parents: 7453f78
Author: Joel Bernstein <jb...@apache.org>
Authored: Fri Mar 3 20:59:36 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Fri Mar 3 20:59:36 2017 -0500
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5ae51d4d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7ea6226..2ffcc46 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -147,6 +147,8 @@ New Features
* SOLR-10156: Add significantTerms Streaming Expression (Joel Bernstein)
+* SOLR-8593: Integrate Apache Calcite into the SQLHandler (Kevin Risden, Cao Manh Dat, Joel Bernstein)
+
Bug Fixes
----------------------
[36/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10244:
TestCoreDiscovery fails if you run it as root.
Posted by is...@apache.org.
SOLR-10244: TestCoreDiscovery fails if you run it as root.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6a6e3032
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6a6e3032
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6a6e3032
Branch: refs/heads/jira/solr-6736
Commit: 6a6e30329843a86de1063a2c8f324eb3f9dbfd91
Parents: 8a54929
Author: Mark Miller <ma...@gmail.com>
Authored: Wed Mar 8 10:23:21 2017 -0500
Committer: Mark Miller <ma...@gmail.com>
Committed: Wed Mar 8 10:23:46 2017 -0500
----------------------------------------------------------------------
solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6a6e3032/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index fa07de8..65d459a 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -337,6 +337,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
File toSet = new File(coreDir, "core1");
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
+ assumeFalse("Appears we are a super user, skip test", toSet.canRead());
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
@@ -362,6 +363,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
File toSet = new File(solrHomeDirectory, "cantReadDir");
assertTrue("Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", toSet.mkdirs());
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
+ assumeFalse("Appears we are a super user, skip test", toSet.canRead());
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
@@ -421,7 +423,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
new File(homeDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
assumeTrue("Cannot make " + homeDir + " non-readable. Test aborted.", homeDir.setReadable(false, false));
-
+ assumeFalse("Appears we are a super user, skip test", homeDir.canRead());
CoreContainer cc = null;
try {
cc = init();
[37/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10248: Merge
SolrTestCaseJ4's SolrIndexSearcher tracking into the ObjectReleaseTracker.
Posted by is...@apache.org.
SOLR-10248: Merge SolrTestCaseJ4's SolrIndexSearcher tracking into the ObjectReleaseTracker.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e35881a6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e35881a6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e35881a6
Branch: refs/heads/jira/solr-6736
Commit: e35881a63aa9de72cf5c539396266e0d0e676956
Parents: 6a6e303
Author: Mark Miller <ma...@apache.org>
Authored: Wed Mar 8 11:30:08 2017 -0500
Committer: Mark Miller <ma...@apache.org>
Committed: Wed Mar 8 11:44:23 2017 -0500
----------------------------------------------------------------------
.../apache/solr/search/SolrIndexSearcher.java | 3 +
.../java/org/apache/solr/SolrTestCaseJ4.java | 71 +++++---------------
2 files changed, 19 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e35881a6/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 521324a..a7ee433 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -105,6 +105,7 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.ObjectReleaseTracker;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
@@ -391,6 +392,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
readerStats = snapStatistics(reader);
// do this at the end since an exception in the constructor means we won't close
numOpens.incrementAndGet();
+ assert ObjectReleaseTracker.track(this);
}
/*
@@ -539,6 +541,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
// do this at the end so it only gets done if there are no exceptions
numCloses.incrementAndGet();
+ assert ObjectReleaseTracker.release(this);
}
/** Direct access to the IndexSchema for use with this searcher */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e35881a6/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index a8c93d6..825e7c7 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -297,17 +297,10 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
if (suiteFailureMarker.wasSuccessful()) {
// if the tests passed, make sure everything was closed / released
if (!RandomizedContext.current().getTargetClass().isAnnotationPresent(SuppressObjectReleaseTracker.class)) {
- endTrackingSearchers(120, false);
- String orr = clearObjectTrackerAndCheckEmpty(120);
+ String orr = clearObjectTrackerAndCheckEmpty(20, false);
assertNull(orr, orr);
} else {
- endTrackingSearchers(15, false);
- String orr = ObjectReleaseTracker.checkEmpty();
- if (orr != null) {
- log.warn(
- "Some resources were not closed, shutdown, or released. This has been ignored due to the SuppressObjectReleaseTracker annotation, trying to close them now.");
- ObjectReleaseTracker.tryClose();
- }
+ clearObjectTrackerAndCheckEmpty(20, true);
}
}
resetFactory();
@@ -341,6 +334,13 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
* @return null if ok else error message
*/
public static String clearObjectTrackerAndCheckEmpty(int waitSeconds) {
+ return clearObjectTrackerAndCheckEmpty(waitSeconds, false);
+ }
+
+ /**
+ * @return null if ok else error message
+ */
+ public static String clearObjectTrackerAndCheckEmpty(int waitSeconds, boolean tryClose) {
int retries = 0;
String result;
do {
@@ -367,6 +367,13 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
log.info("------------------------------------------------------- Done waiting for tracked resources to be released");
+
+ if (tryClose && result != null && RandomizedContext.current().getTargetClass().isAnnotationPresent(SuppressObjectReleaseTracker.class)) {
+ log.warn(
+ "Some resources were not closed, shutdown, or released. This has been ignored due to the SuppressObjectReleaseTracker annotation, trying to close them now.");
+ ObjectReleaseTracker.tryClose();
+ }
+
ObjectReleaseTracker.clear();
return result;
@@ -580,52 +587,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
numOpens = numCloses = 0;
}
}
-
- public static void endTrackingSearchers(int waitSeconds, boolean failTest) {
- long endNumOpens = SolrIndexSearcher.numOpens.get();
- long endNumCloses = SolrIndexSearcher.numCloses.get();
-
- // wait a bit in case any ending threads have anything to release
- int retries = 0;
- while (endNumOpens - numOpens != endNumCloses - numCloses) {
- if (retries++ > waitSeconds) {
- break;
- }
- if (retries % 10 == 0) {
- log.info("Waiting for all SolrIndexSearchers to be released at end of test");
- if (retries > 10) {
- TraceFormatting tf = new TraceFormatting();
- Map<Thread,StackTraceElement[]> stacksMap = Thread.getAllStackTraces();
- Set<Entry<Thread,StackTraceElement[]>> entries = stacksMap.entrySet();
- for (Entry<Thread,StackTraceElement[]> entry : entries) {
- String stack = tf.formatStackTrace(entry.getValue());
- System.err.println(entry.getKey().getName() + ":\n" + stack);
- }
- }
- }
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {}
- endNumOpens = SolrIndexSearcher.numOpens.get();
- endNumCloses = SolrIndexSearcher.numCloses.get();
- }
-
- log.info("------------------------------------------------------- Done waiting for all SolrIndexSearchers to be released");
-
- SolrIndexSearcher.numOpens.getAndSet(0);
- SolrIndexSearcher.numCloses.getAndSet(0);
-
- if (endNumOpens-numOpens != endNumCloses-numCloses) {
- String msg = "ERROR: SolrIndexSearcher opens=" + (endNumOpens-numOpens) + " closes=" + (endNumCloses-numCloses);
- log.error(msg);
- // if it's TestReplicationHandler, ignore it. the test is broken and gets no love
- if ("TestReplicationHandler".equals(RandomizedContext.current().getTargetClass().getSimpleName())) {
- log.warn("TestReplicationHandler wants to fail!: " + msg);
- } else {
- if (failTest) fail(msg);
- }
- }
- }
/** Causes an exception matching the regex pattern to not be logged. */
public static void ignoreException(String pattern) {
[19/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10230: default TTL
of PKIAuthenticationPlugin increased to 10secs
Posted by is...@apache.org.
SOLR-10230: default TTL of PKIAuthenticationPlugin increased to 10secs
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fb1d2d1a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fb1d2d1a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fb1d2d1a
Branch: refs/heads/jira/solr-6736
Commit: fb1d2d1ae21f04be355637cfc3b096b090583378
Parents: f49bd79
Author: Noble Paul <no...@apache.org>
Authored: Mon Mar 6 10:09:01 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Mon Mar 6 10:09:01 2017 +1030
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +++-
.../java/org/apache/solr/security/PKIAuthenticationPlugin.java | 2 +-
2 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fb1d2d1a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5b7eac5..0b05749 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -285,7 +285,9 @@ Other Changes
* SOLR-10155: For numeric types facet.contains= and facet.prefix= are now rejected.
(Gus Heck, Christine Poerschke)
-* SOLR-10171 Add Constant Reduction Rules to Calcite Planner (Kevin Risden)
+* SOLR-10171: Add Constant Reduction Rules to Calcite Planner (Kevin Risden)
+
+* SOLR-10230: default TTL of PKIAuthenticationPlugin increased to 10secs (noble)
================== 6.4.2 ==================
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fb1d2d1a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
index fdd4408..fcc0560 100644
--- a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
@@ -63,7 +63,7 @@ public class PKIAuthenticationPlugin extends AuthenticationPlugin implements Htt
private final Map<String, PublicKey> keyCache = new ConcurrentHashMap<>();
private final CryptoKeys.RSAKeyPair keyPair = new CryptoKeys.RSAKeyPair();
private final CoreContainer cores;
- private final int MAX_VALIDITY = Integer.parseInt(System.getProperty("pkiauth.ttl", "5000"));
+ private final int MAX_VALIDITY = Integer.parseInt(System.getProperty("pkiauth.ttl", "10000"));
private final String myNodeName;
private final HttpHeaderClientInterceptor interceptor = new HttpHeaderClientInterceptor();
private boolean interceptorRegistered = false;
[43/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7734: FieldType
copy constructor widened to IndexableFieldType
Posted by is...@apache.org.
LUCENE-7734: FieldType copy constructor widened to IndexableFieldType
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d2bf30d5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d2bf30d5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d2bf30d5
Branch: refs/heads/jira/solr-6736
Commit: d2bf30d58fbfc9279bed663500400153b4d4df44
Parents: d945a24
Author: David Smiley <ds...@apache.org>
Authored: Thu Mar 9 23:12:45 2017 -0500
Committer: David Smiley <ds...@apache.org>
Committed: Thu Mar 9 23:12:45 2017 -0500
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +++
lucene/core/src/java/org/apache/lucene/document/FieldType.java | 6 +++---
2 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2bf30d5/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index a8f7ee4..4040945 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -117,6 +117,9 @@ API Changes
instead of once all shard responses are present. (Simon Willnauer,
Mike McCandless)
+* LUCENE-7734: FieldType's copy constructor was widened to accept any IndexableFieldType.
+ (David Smiley)
+
New Features
* LUCENE-7449: Add CROSSES relation support to RangeFieldQuery. (Nick Knize)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2bf30d5/lucene/core/src/java/org/apache/lucene/document/FieldType.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/FieldType.java b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
index 6f206a4..3c7d276 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FieldType.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
@@ -44,7 +44,7 @@ public class FieldType implements IndexableFieldType {
/**
* Create a new mutable FieldType with all of the properties from <code>ref</code>
*/
- public FieldType(FieldType ref) {
+ public FieldType(IndexableFieldType ref) {
this.stored = ref.stored();
this.tokenized = ref.tokenized();
this.storeTermVectors = ref.storeTermVectors();
@@ -54,8 +54,8 @@ public class FieldType implements IndexableFieldType {
this.omitNorms = ref.omitNorms();
this.indexOptions = ref.indexOptions();
this.docValuesType = ref.docValuesType();
- this.dimensionCount = ref.dimensionCount;
- this.dimensionNumBytes = ref.dimensionNumBytes;
+ this.dimensionCount = ref.pointDimensionCount();
+ this.dimensionNumBytes = ref.pointNumBytes();
// Do not copy frozen!
}
[41/50] [abbrv] lucene-solr:jira/solr-6736: added a test
Posted by is...@apache.org.
added a test
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c680f45f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c680f45f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c680f45f
Branch: refs/heads/jira/solr-6736
Commit: c680f45f2d8ca126e2783c371e07b46bd16234c6
Parents: c85aac2
Author: Noble Paul <no...@apache.org>
Authored: Thu Mar 9 14:41:42 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Thu Mar 9 14:41:42 2017 +1030
----------------------------------------------------------------------
.../TestRuleBasedAuthorizationPlugin.java | 23 ++++++++++++++++++++
1 file changed, 23 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c680f45f/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
index 03656c5..4cdc555 100644
--- a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
@@ -305,6 +305,21 @@ public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 {
"handler", new DumpRequestHandler(),
"params", new MapSolrParams(singletonMap("key", "VAL2")))
, FORBIDDEN);
+
+ checkRules(makeMap("resource", "/update",
+ "userPrincipal", "solr",
+ "requestType", RequestType.UNKNOWN,
+ "collectionRequests", "go",
+ "handler", new UpdateRequestHandler(),
+ "params", new MapSolrParams(singletonMap("key", "VAL2")))
+ , FORBIDDEN, (Map<String, Object>) Utils.fromJSONString( "{user-role:{" +
+ " admin:[admin_role]," +
+ " update:[update_role]," +
+ " solr:[read_role]}," +
+ " permissions:[" +
+ " {name:update, role:[admin_role,update_role]}," +
+ " {name:read, role:[admin_role,update_role,read_role]}" +
+ "]}"));
}
public void testEditRules() throws IOException {
@@ -438,5 +453,13 @@ public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 {
}
}
+static String testPerms = "{user-role:{" +
+ " admin:[admin_role]," +
+ " update:[update_role]," +
+ " solr:[read_role]}," +
+ " permissions:[" +
+ " {name:update,role:[admin_role,update_role]}," +
+ " {name:read,role:[admin_role,update_role,read_role]" +
+ "]}";
}
[08/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7410: Fix test bug.
Posted by is...@apache.org.
LUCENE-7410: Fix test bug.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fbc844d3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fbc844d3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fbc844d3
Branch: refs/heads/jira/solr-6736
Commit: fbc844d33439efc1c5c6fee5547715d1a1b0db83
Parents: d947f53
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Mar 3 17:39:48 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Mar 3 17:41:04 2017 +0100
----------------------------------------------------------------------
.../org/apache/solr/index/TestSlowCompositeReaderWrapper.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fbc844d3/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java b/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
index 195aae5..9907d59 100644
--- a/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
+++ b/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
@@ -30,6 +30,7 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
@@ -95,7 +96,7 @@ public class TestSlowCompositeReaderWrapper extends LuceneTestCase {
public void testOrdMapsAreCached() throws Exception {
Directory dir = newDirectory();
- RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
doc.add(new SortedDocValuesField("sorted", new BytesRef("a")));
doc.add(new SortedSetDocValuesField("sorted_set", new BytesRef("b")));
[21/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-8593: in
TestSQLHandler assume not run with Turkish locale
Posted by is...@apache.org.
SOLR-8593: in TestSQLHandler assume not run with Turkish locale
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6df17c8c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6df17c8c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6df17c8c
Branch: refs/heads/jira/solr-6736
Commit: 6df17c8cfe72d229140fb644d067a50cd7a2b455
Parents: acb185b
Author: Joel Bernstein <jb...@apache.org>
Authored: Mon Mar 6 12:20:28 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Mon Mar 6 12:20:28 2017 -0500
----------------------------------------------------------------------
solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java | 4 ++++
1 file changed, 4 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6df17c8c/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
index d724fbd..f222cee 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.ExceptionStream;
@@ -69,6 +70,9 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
@Test
public void doTest() throws Exception {
+
+ assumeFalse("This test fails on UNIX with Turkish default locale", new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
+
waitForRecoveriesToFinish(false);
testBasicSelect();
[45/50] [abbrv] lucene-solr:jira/solr-6736: LUCENE-7734: move to 7x
section; won't do 6x backport
Posted by is...@apache.org.
LUCENE-7734: move to 7x section; won't do 6x backport
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6415d912
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6415d912
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6415d912
Branch: refs/heads/jira/solr-6736
Commit: 6415d912ca370c47ac9bd138d719b0ade71893a1
Parents: 9540bc3
Author: David Smiley <ds...@apache.org>
Authored: Fri Mar 10 21:35:53 2017 -0500
Committer: David Smiley <ds...@apache.org>
Committed: Fri Mar 10 21:35:53 2017 -0500
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6415d912/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index b6ee4b8..9407dfa 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -41,6 +41,9 @@ API Changes
index-time scoring factors should be indexed into a doc value field and
combined at query time using eg. FunctionScoreQuery. (Adrien Grand)
+* LUCENE-7734: FieldType's copy constructor was widened to accept any IndexableFieldType.
+ (David Smiley)
+
Bug Fixes
* LUCENE-7626: IndexWriter will no longer accept broken token offsets
[50/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-6736: Adding config
sets with trusted=false, disallow XSLT request handler
Posted by is...@apache.org.
SOLR-6736: Adding config sets with trusted=false, disallow XSLT request handler
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f664f1f3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f664f1f3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f664f1f3
Branch: refs/heads/jira/solr-6736
Commit: f664f1f39bf5c93e20cd19ae83a2538100d21942
Parents: 405da79
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Sun Mar 12 05:47:24 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Sun Mar 12 05:47:24 2017 +0530
----------------------------------------------------------------------
.../apache/solr/cloud/CreateCollectionCmd.java | 2 +-
.../apache/solr/cloud/ZkSolrResourceLoader.java | 3 +
.../java/org/apache/solr/core/ConfigSet.java | 12 ++-
.../org/apache/solr/core/ConfigSetService.java | 25 +++++-
.../org/apache/solr/core/CoreContainer.java | 3 +-
.../org/apache/solr/core/CoreDescriptor.java | 17 ++++
.../org/apache/solr/core/RequestHandlers.java | 12 ++-
.../src/java/org/apache/solr/core/SolrCore.java | 10 ++-
.../solr/handler/admin/ConfigSetsHandler.java | 10 +--
.../apache/solr/handler/loader/XMLLoader.java | 7 ++
.../solr/configsets/upload/managed-schema | 25 ++++++
.../solr/configsets/upload/newzkconf.zip | Bin 1994 -> 3186 bytes
.../solr/configsets/upload/schema-minimal.xml | 25 ------
.../configsets/upload/solrconfig-minimal.xml | 59 -------------
.../solr/configsets/upload/solrconfig.xml | 61 +++++++++++++
.../upload/xslt/xsl-update-handler-test.xsl | 49 +++++++++++
.../apache/solr/cloud/TestConfigSetsAPI.java | 88 ++++++++++++++++---
.../org/apache/solr/core/TestCodecSupport.java | 2 +-
.../apache/solr/cloud/MiniSolrCloudCluster.java | 10 +++
19 files changed, 302 insertions(+), 118 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
index a1bb70e..f426658 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
@@ -246,7 +246,7 @@ public class CreateCollectionCmd implements Cmd {
ShardRequest sreq = new ShardRequest();
sreq.nodeName = nodeName;
params.set("qt", ocmh.adminPath);
- sreq.purpose = 1;
+ sreq.purpose = ShardRequest.PURPOSE_PRIVATE;
sreq.shards = new String[]{baseUrl};
sreq.actualShards = sreq.shards;
sreq.params = params;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
index b4137b3..c2b6bbb 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
@@ -82,6 +82,9 @@ public class ZkSolrResourceLoader extends SolrResourceLoader {
public InputStream openResource(String resource) throws IOException {
InputStream is;
String file = configSetZkPath + "/" + resource;
+ if (file.endsWith("/")) {
+ file = file.substring(0, file.length()-1);
+ }
int maxTries = 10;
Exception exception = null;
while (maxTries -- > 0) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/ConfigSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSet.java b/solr/core/src/java/org/apache/solr/core/ConfigSet.java
index c406506..e94a892 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSet.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSet.java
@@ -31,12 +31,16 @@ public class ConfigSet {
private final IndexSchema indexSchema;
private final NamedList properties;
-
- public ConfigSet(String name, SolrConfig solrConfig, IndexSchema indexSchema, NamedList properties) {
+
+ private final boolean trusted;
+
+ public ConfigSet(String name, SolrConfig solrConfig, IndexSchema indexSchema,
+ NamedList properties, boolean trusted) {
this.name = name;
this.solrconfig = solrConfig;
this.indexSchema = indexSchema;
this.properties = properties;
+ this.trusted = trusted;
}
public String getName() {
@@ -54,4 +58,8 @@ public class ConfigSet {
public NamedList getProperties() {
return properties;
}
+
+ public boolean isTrusted() {
+ return trusted;
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index e4a135e..5e68b70 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -28,6 +28,7 @@ import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.solr.cloud.CloudConfigSetService;
import org.apache.solr.cloud.ZkController;
+import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.schema.IndexSchema;
@@ -76,8 +77,18 @@ public abstract class ConfigSetService {
try {
SolrConfig solrConfig = createSolrConfig(dcore, coreLoader);
IndexSchema schema = createIndexSchema(dcore, solrConfig);
+
+ // nocommit javadocs difference between properties and flags
NamedList properties = createConfigSetProperties(dcore, coreLoader);
- return new ConfigSet(configName(dcore), solrConfig, schema, properties);
+ NamedList flags = getConfigSetFlags(dcore, coreLoader);
+
+ boolean trusted =
+ (coreLoader instanceof ZkSolrResourceLoader
+ && flags != null
+ && flags.get("trusted") != null
+ && !flags.getBooleanArg("trusted")
+ ) ? false: true;
+ return new ConfigSet(configName(dcore), solrConfig, schema, properties, trusted);
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Could not load conf for core " + dcore.getName() +
@@ -116,6 +127,18 @@ public abstract class ConfigSetService {
return ConfigSetProperties.readFromResourceLoader(loader, cd.getConfigSetPropertiesName());
}
+ protected NamedList getConfigSetFlags(CoreDescriptor cd, SolrResourceLoader loader) {
+ if (loader instanceof ZkSolrResourceLoader) {
+ try {
+ return ConfigSetProperties.readFromResourceLoader(loader, "");
+ } catch (Exception ex) {
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+
/**
* Create a SolrResourceLoader for a core
* @param cd the core's CoreDescriptor
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index b9597ae..5a6f98f 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -944,7 +944,8 @@ public class CoreContainer {
}
ConfigSet coreConfig = coreConfigService.getConfig(dcore);
- log.info("Creating SolrCore '{}' using configuration from {}", dcore.getName(), coreConfig.getName());
+ dcore.setConfigSetTrusted(coreConfig.isTrusted());
+ log.info("Creating SolrCore '{}' using configuration from {}, trusted={}", dcore.getName(), coreConfig.getName(), dcore.isConfigSetTrusted());
try {
core = new SolrCore(dcore, coreConfig);
} catch (SolrException e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
index a58de48..5dd214a 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
@@ -66,6 +66,14 @@ public class CoreDescriptor {
public static final String DEFAULT_EXTERNAL_PROPERTIES_FILE = "conf" + File.separator + "solrcore.properties";
/**
+ * Whether this core was configured using a configSet that was trusted.
+ * This helps in avoiding the loading of plugins that have potential
+ * vulnerabilities, when the configSet was not uploaded from a trusted
+ * source.
+ */
+ private boolean trustedConfigSet = true;
+
+ /**
* Get the standard properties in persistable form
* @return the standard core properties in persistable form
*/
@@ -170,6 +178,7 @@ public class CoreDescriptor {
this.coreProperties.setProperty(CORE_NAME, coreName);
this.originalCoreProperties.setProperty(CORE_NAME, coreName);
this.substitutableProperties.setProperty(SOLR_CORE_PROP_PREFIX + CORE_NAME, coreName);
+ this.trustedConfigSet = other.trustedConfigSet;
}
/**
@@ -372,4 +381,12 @@ public class CoreDescriptor {
public String getConfigSetPropertiesName() {
return coreProperties.getProperty(CORE_CONFIGSET_PROPERTIES);
}
+
+ public boolean isConfigSetTrusted() {
+ return trustedConfigSet;
+ }
+
+ public void setConfigSetTrusted(boolean trusted) {
+ this.trustedConfigSet = trusted;
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/RequestHandlers.java b/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
index dd06fa5..c91e711 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
@@ -114,7 +114,7 @@ public final class RequestHandlers {
* Handlers will be registered and initialized in the order they appear in solrconfig.xml
*/
- void initHandlersFromConfig(SolrConfig config) {
+ void initHandlersFromConfig(SolrConfig config, boolean isConfigSetTrusted) {
List<PluginInfo> implicits = core.getImplicitHandlers();
// use link map so we iterate in the same order
Map<String, PluginInfo> infoMap= new LinkedHashMap<>();
@@ -125,8 +125,9 @@ public final class RequestHandlers {
List<PluginInfo> modifiedInfos = new ArrayList<>();
for (PluginInfo info : infos) {
- modifiedInfos.add(applyInitParams(config, info));
+ modifiedInfos.add(applyInitParams(config, isConfigSetTrusted, info));
}
+ System.out.println("Handlers infos: "+modifiedInfos); // nocommit
handlers.init(Collections.emptyMap(),core, modifiedInfos);
handlers.alias(handlers.getDefault(), "");
log.debug("Registered paths: {}" , StrUtils.join(new ArrayList<>(handlers.keySet()) , ',' ));
@@ -137,7 +138,7 @@ public final class RequestHandlers {
}
}
- private PluginInfo applyInitParams(SolrConfig config, PluginInfo info) {
+ private PluginInfo applyInitParams(SolrConfig config, boolean isConfigSetTrusted, PluginInfo info) {
List<InitParams> ags = new ArrayList<>();
String p = info.attributes.get(InitParams.TYPE);
if(p!=null) {
@@ -148,12 +149,17 @@ public final class RequestHandlers {
}
for (InitParams args : config.getInitParams().values())
if(args.matchPath(info.name)) ags.add(args);
+
+ // nocommit review to make sure no plugin actually uses that keyname
+ info.initArgs.add("trusted", isConfigSetTrusted);
+
if(!ags.isEmpty()){
info = info.copy();
for (InitParams initParam : ags) {
initParam.apply(info);
}
}
+
return info;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 13c3bdd..dd7645d 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -187,6 +187,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
private final SolrResourceLoader resourceLoader;
private volatile IndexSchema schema;
private final NamedList configSetProperties;
+ private final boolean isConfigSetTrusted;
private final String dataDir;
private final String ulogDir;
private final UpdateHandler updateHandler;
@@ -635,7 +636,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
CoreDescriptor cd = new CoreDescriptor(coreDescriptor.getName(), coreDescriptor);
cd.loadExtraProperties(); //Reload the extra properties
core = new SolrCore(getName(), getDataDir(), coreConfig.getSolrConfig(),
- coreConfig.getIndexSchema(), coreConfig.getProperties(),
+ coreConfig.getIndexSchema(), coreConfig.getProperties(), coreConfig.isTrusted(),
cd, updateHandler, solrDelPolicy, currentCore, true);
// we open a new IndexWriter to pick up the latest config
@@ -827,7 +828,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
public SolrCore(CoreDescriptor cd, ConfigSet coreConfig) {
this(cd.getName(), null, coreConfig.getSolrConfig(), coreConfig.getIndexSchema(), coreConfig.getProperties(),
- cd, null, null, null, false);
+ coreConfig.isTrusted(), cd, null, null, null, false);
}
@@ -845,7 +846,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
* @since solr 1.3
*/
public SolrCore(String name, String dataDir, SolrConfig config,
- IndexSchema schema, NamedList configSetProperties,
+ IndexSchema schema, NamedList configSetProperties, boolean isConfigSetTrusted,
CoreDescriptor coreDescriptor, UpdateHandler updateHandler,
IndexDeletionPolicyWrapper delPolicy, SolrCore prev, boolean reload) {
@@ -858,6 +859,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
resourceLoader = config.getResourceLoader();
this.solrConfig = config;
this.configSetProperties = configSetProperties;
+ this.isConfigSetTrusted = isConfigSetTrusted;
// Initialize the metrics manager
this.coreMetricManager = initCoreMetricManager(config);
this.coreMetricManager.loadReporters();
@@ -922,7 +924,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
// Processors initialized before the handlers
updateProcessorChains = loadUpdateProcessorChains();
reqHandlers = new RequestHandlers(this);
- reqHandlers.initHandlersFromConfig(solrConfig);
+ reqHandlers.initHandlersFromConfig(solrConfig, isConfigSetTrusted);
statsCache = initStatsCache();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
index cc15239..17e52ea 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
@@ -144,12 +144,6 @@ public class ConfigSetsHandler extends RequestHandlerBase {
private void handleConfigUploadRequest(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- String httpMethod = (String) req.getContext().get(ConfigSetParams.HTTP_METHOD);
- if (!"POST".equals(httpMethod)) {
- throw new SolrException(ErrorCode.BAD_REQUEST,
- "The upload action supports POST requests only");
- }
-
String configSetName = req.getParams().get(NAME);
if (StringUtils.isBlank(configSetName)) {
throw new SolrException(ErrorCode.BAD_REQUEST,
@@ -173,8 +167,8 @@ public class ConfigSetsHandler extends RequestHandlerBase {
InputStream inputStream = contentStreamsIterator.next().getStream();
- // Create a node for the configuration in zookeeper
- zkClient.makePath(configPathInZk, true);
+ // Create a node for the configuration in zookeeper nocommit: do this only if /admin is not protected by authz/authc
+ zkClient.makePath(configPathInZk, "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), true);
ZipInputStream zis = new ZipInputStream(inputStream, StandardCharsets.UTF_8);
ZipEntry zipEntry = null;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
index 038ed9f..c283f6a 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
@@ -41,6 +41,7 @@ import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
@@ -132,6 +133,12 @@ public class XMLLoader extends ContentStreamLoader {
String tr = req.getParams().get(CommonParams.TR,null);
if(tr!=null) {
+ if (req.getCore().getCoreDescriptor().isConfigSetTrusted() == false) {
+ throw new SolrException(ErrorCode.UNAUTHORIZED, "The configset for this collection was uploaded without any authorization in place,"
+ + " and this operation is not available for collections with untrusted configsets. To have this feature, re-upload the configset"
+ + " after enabling authentication and authorization for the /admin endpoints.");
+ }
+
final Transformer t = getTransformer(tr,req);
final DOMResult result = new DOMResult();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/managed-schema
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/managed-schema b/solr/core/src/test-files/solr/configsets/upload/managed-schema
new file mode 100644
index 0000000..9e2f947
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/upload/managed-schema
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<schema name="minimal" version="1.1">
+ <types>
+ <fieldType name="string" class="solr.StrField"/>
+ </types>
+ <fields>
+ <dynamicField name="*" type="string" indexed="true" stored="true" />
+ </fields>
+</schema>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/newzkconf.zip
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/newzkconf.zip b/solr/core/src/test-files/solr/configsets/upload/newzkconf.zip
index e9db927..1577bd1 100644
Binary files a/solr/core/src/test-files/solr/configsets/upload/newzkconf.zip and b/solr/core/src/test-files/solr/configsets/upload/newzkconf.zip differ
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/schema-minimal.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/schema-minimal.xml b/solr/core/src/test-files/solr/configsets/upload/schema-minimal.xml
deleted file mode 100644
index 9e2f947..0000000
--- a/solr/core/src/test-files/solr/configsets/upload/schema-minimal.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<schema name="minimal" version="1.1">
- <types>
- <fieldType name="string" class="solr.StrField"/>
- </types>
- <fields>
- <dynamicField name="*" type="string" indexed="true" stored="true" />
- </fields>
-</schema>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/solrconfig-minimal.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/solrconfig-minimal.xml b/solr/core/src/test-files/solr/configsets/upload/solrconfig-minimal.xml
deleted file mode 100644
index 2f9609d..0000000
--- a/solr/core/src/test-files/solr/configsets/upload/solrconfig-minimal.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!-- This is a "kitchen sink" config file that tests can use.
- When writting a new test, feel free to add *new* items (plugins,
- config options, etc...) as long as they don't break any existing
- tests. if you need to test something esoteric please add a new
- "solrconfig-your-esoteric-purpose.xml" config file.
-
- Note in particular that this test is used by MinimalSchemaTest so
- Anything added to this file needs to work correctly even if there
- is now uniqueKey or defaultSearch Field.
- -->
-
-<config>
-
- <dataDir>${solr.data.dir:}</dataDir>
-
- <directoryFactory name="DirectoryFactory"
- class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
-
- <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-
- <updateHandler class="solr.DirectUpdateHandler2">
- <commitWithin>
- <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
- </commitWithin>
-
- </updateHandler>
- <requestHandler name="/select" class="solr.SearchHandler">
- <lst name="defaults">
- <str name="echoParams">explicit</str>
- <str name="indent">true</str>
- <str name="df">text</str>
- </lst>
-
- </requestHandler>
-
- <requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" />
-
- <requestHandler name="/update" class="solr.UpdateRequestHandler" />
-</config>
-
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/solrconfig.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/solrconfig.xml b/solr/core/src/test-files/solr/configsets/upload/solrconfig.xml
new file mode 100644
index 0000000..82d0cc9
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/upload/solrconfig.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- This is a "kitchen sink" config file that tests can use.
+ When writting a new test, feel free to add *new* items (plugins,
+ config options, etc...) as long as they don't break any existing
+ tests. if you need to test something esoteric please add a new
+ "solrconfig-your-esoteric-purpose.xml" config file.
+
+ Note in particular that this test is used by MinimalSchemaTest so
+ Anything added to this file needs to work correctly even if there
+ is now uniqueKey or defaultSearch Field.
+ -->
+
+<config>
+
+ <dataDir>${solr.data.dir:}</dataDir>
+
+ <directoryFactory name="DirectoryFactory"
+ class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
+
+ <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+
+ <updateHandler class="solr.DirectUpdateHandler2">
+ <commitWithin>
+ <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
+ </commitWithin>
+
+ </updateHandler>
+ <requestHandler name="/select" class="solr.SearchHandler">
+ <lst name="defaults">
+ <str name="echoParams">explicit</str>
+ <str name="indent">true</str>
+ <str name="df">text</str>
+ </lst>
+
+ </requestHandler>
+
+ <requestHandler name="/update/xslt"
+ startup="lazy"
+ class="solr.XsltUpdateRequestHandler"/>
+
+ <requestHandler name="/update" class="solr.UpdateRequestHandler" />
+</config>
+
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test-files/solr/configsets/upload/xslt/xsl-update-handler-test.xsl
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/upload/xslt/xsl-update-handler-test.xsl b/solr/core/src/test-files/solr/configsets/upload/xslt/xsl-update-handler-test.xsl
new file mode 100644
index 0000000..2e7359a
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/upload/xslt/xsl-update-handler-test.xsl
@@ -0,0 +1,49 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<!--
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<!--
+
+
+XSL transform used to test the XSLTUpdateRequestHandler.
+Transforms a test XML into standard Solr <add><doc/></add> format.
+
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+ <xsl:template match="/">
+ <add>
+ <xsl:apply-templates select="/random/document"/>
+ </add>
+ </xsl:template>
+
+ <xsl:template match="document">
+ <doc boost="5.5">
+ <xsl:apply-templates select="*"/>
+ </doc>
+ </xsl:template>
+
+ <xsl:template match="node">
+ <field name="{@name}">
+ <xsl:if test="@enhance!=''">
+ <xsl:attribute name="boost"><xsl:value-of select="@enhance"/></xsl:attribute>
+ </xsl:if>
+ <xsl:value-of select="@value"/>
+ </field>
+ </xsl:template>
+
+</xsl:stylesheet>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index aa78c1d..fc325fb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -40,19 +40,27 @@ import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.util.EntityUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest;
+import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest.Create;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest.Delete;
-import org.apache.solr.client.solrj.request.ConfigSetAdminRequest.List;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.client.solrj.response.ConfigSetAdminResponse;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkConfigManager;
+import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ConfigSetParams;
import org.apache.solr.common.params.ConfigSetParams.ConfigSetAction;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.ConfigSetProperties;
@@ -317,23 +325,77 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
long statusCode = (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status"));
assertEquals(0l, statusCode);
- assertTrue("schema-minimal.xml file should have been uploaded",
- zkClient.exists("/configs/newzkconf/schema-minimal.xml", true));
- assertTrue("schema-minimal.xml file contents on zookeeper are not exactly same as that of the file uploaded in config",
- Arrays.equals(zkClient.getData("/configs/newzkconf/schema-minimal.xml", null, null, true),
- readFile("solr/configsets/upload/schema-minimal.xml")));
-
- assertTrue("solrconfig-minimal.xml file should have been uploaded",
- zkClient.exists("/configs/newzkconf/solrconfig-minimal.xml", true));
- assertTrue("solrconfig-minimal.xml file contents on zookeeper are not exactly same as that of the file uploaded in config",
- Arrays.equals(zkClient.getData("/configs/newzkconf/solrconfig-minimal.xml", null, null, true),
- readFile("solr/configsets/upload/solrconfig-minimal.xml")));
+ assertTrue("managed-schema file should have been uploaded",
+ zkClient.exists("/configs/newzkconf/managed-schema", true));
+ assertTrue("managed-schema file contents on zookeeper are not exactly same as that of the file uploaded in config",
+ Arrays.equals(zkClient.getData("/configs/newzkconf/managed-schema", null, null, true),
+ readFile("solr/configsets/upload/managed-schema")));
+
+ assertTrue("solrconfig.xml file should have been uploaded",
+ zkClient.exists("/configs/newzkconf/solrconfig.xml", true));
+ byte data[] = zkClient.getData("/configs/newzkconf", null, null, true);
+ //assertEquals("{\"trusted\": false}", new String(data, StandardCharsets.UTF_8));
+ assertTrue("solrconfig.xml file contents on zookeeper are not exactly same as that of the file uploaded in config",
+ Arrays.equals(zkClient.getData("/configs/newzkconf/solrconfig.xml", null, null, true),
+ readFile("solr/configsets/upload/solrconfig.xml")));
+
+ // try to create a collection with the uploaded configset
+ createCollection("newcollection", "newzkconf", 1, 1, solrClient);
+ xsltRequest("newcollection");
} finally {
zkClient.close();
}
solrClient.close();
}
+
+ public void xsltRequest(String collection) throws SolrServerException, IOException {
+ String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
+ try (HttpSolrClient client = new HttpSolrClient(baseUrl + "/" + collection)) {
+ String xml =
+ "<random>" +
+ " <document>" +
+ " <node name=\"id\" value=\"12345\"/>" +
+ " <node name=\"name\" value=\"kitten\"/>" +
+ " <node name=\"text\" enhance=\"3\" value=\"some other day\"/>" +
+ " <node name=\"title\" enhance=\"4\" value=\"A story\"/>" +
+ " <node name=\"timestamp\" enhance=\"5\" value=\"2011-07-01T10:31:57.140Z\"/>" +
+ " </document>" +
+ "</random>";
+
+ SolrQuery query = new SolrQuery();
+ query.setQuery( "*:*" );//for anything
+ query.add("qt","/update");
+ query.add(CommonParams.TR, "xsl-update-handler-test.xsl");
+ query.add("stream.body", xml);
+ query.add("commit", "true");
+ try {
+ client.query(query);
+ fail("This should've returned a 401.");
+ } catch (SolrException ex) {
+ assertEquals(ErrorCode.UNAUTHORIZED.code, ex.code());
+ }
+ //System.out.println("Results: "+client.query(params("q", "*:*")));
+ }
+ }
+
+
+ protected CollectionAdminResponse createCollection(String collectionName, String confSetName, int numShards,
+ int replicationFactor, SolrClient client) throws SolrServerException, IOException {
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set("action", CollectionAction.CREATE.toString());
+ params.set("collection.configName", confSetName);
+ params.set("name", collectionName);
+ params.set("numShards", numShards);
+ params.set("replicationFactor", replicationFactor);
+ SolrRequest request = new QueryRequest(params);
+ request.setPath("/admin/collections");
+
+ CollectionAdminResponse res = new CollectionAdminResponse();
+ res.setResponse(client.request(request));
+ return res;
+ }
+
public static Map postDataAndGetResponse(CloudSolrClient cloudClient,
String uri, ByteBuffer bytarr) throws IOException {
HttpPost httpPost = null;
@@ -462,7 +524,7 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null);
try {
// test empty
- List list = new List();
+ ConfigSetAdminRequest.List list = new ConfigSetAdminRequest.List();
ConfigSetAdminResponse.List response = list.process(solrClient);
Collection<String> actualConfigSets = response.getConfigSets();
assertEquals(0, actualConfigSets.size());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
index b6097ab..7d2f174 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
@@ -218,7 +218,7 @@ public class TestCodecSupport extends SolrTestCaseJ4 {
try {
c = new SolrCore(new CoreDescriptor(h.getCoreContainer(), newCoreName, testSolrHome.resolve(newCoreName)),
- new ConfigSet("fakeConfigset", config, schema, null));
+ new ConfigSet("fakeConfigset", config, schema, null, true));
assertNull(h.getCoreContainer().registerCore(newCoreName, c, false, false));
h.coreName = newCoreName;
assertEquals("We are not using the correct core", "solrconfig_codec2.xml", h.getCore().getConfigResource());
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f664f1f3/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index e8a0c08..b129193 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -438,6 +438,16 @@ public class MiniSolrCloudCluster {
}
}
}
+
+ /*public void createCollection(String collectionName, String configName, int numShards,
+ int replicationFactor) throws Exception {
+ try (ZkStateReader reader = new ZkStateReader(solrClient.getZkStateReader().getZkClient())) {
+ reader.createClusterStateWatchersAndUpdate();
+ for (String collection : reader.getClusterState().getCollectionStates().keySet()) {
+ CollectionAdminRequest.deleteCollection(collection).process(solrClient);
+ }
+ }
+ }*/
/**
* Shut down the cluster, including all Solr nodes and ZooKeeper
[29/50] [abbrv] lucene-solr:jira/solr-6736: doap entries for 6.4.2
Posted by is...@apache.org.
doap entries for 6.4.2
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/57e8543b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/57e8543b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/57e8543b
Branch: refs/heads/jira/solr-6736
Commit: 57e8543bfd08533132d145985cadfcbdc5c12c36
Parents: 0d2c027
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Wed Mar 8 00:43:01 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 00:48:48 2017 +0530
----------------------------------------------------------------------
dev-tools/doap/lucene.rdf | 7 +++++++
dev-tools/doap/solr.rdf | 7 +++++++
2 files changed, 14 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57e8543b/dev-tools/doap/lucene.rdf
----------------------------------------------------------------------
diff --git a/dev-tools/doap/lucene.rdf b/dev-tools/doap/lucene.rdf
index 4b57d6c..c1950a0 100644
--- a/dev-tools/doap/lucene.rdf
+++ b/dev-tools/doap/lucene.rdf
@@ -68,6 +68,13 @@
<release>
<Version>
+ <name>lucene-6.4.2</name>
+ <created>2017-03-07</created>
+ <revision>6.4.2</revision>
+ </Version>
+ </release>
+ <release>
+ <Version>
<name>lucene-6.4.1</name>
<created>2017-02-06</created>
<revision>6.4.1</revision>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57e8543b/dev-tools/doap/solr.rdf
----------------------------------------------------------------------
diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf
index 47a6652..45efd08 100644
--- a/dev-tools/doap/solr.rdf
+++ b/dev-tools/doap/solr.rdf
@@ -68,6 +68,13 @@
<release>
<Version>
+ <name>solr-6.4.2</name>
+ <created>2017-03-07</created>
+ <revision>6.4.2</revision>
+ </Version>
+ </release>
+ <release>
+ <Version>
<name>solr-6.4.1</name>
<created>2017-02-06</created>
<revision>6.4.1</revision>
[24/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9986: Fix precommit
Posted by is...@apache.org.
SOLR-9986: Fix precommit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7af6cc97
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7af6cc97
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7af6cc97
Branch: refs/heads/jira/solr-6736
Commit: 7af6cc97151d727cb4d4e8730491a32e56a29397
Parents: 3131ec2
Author: Cao Manh Dat <da...@apache.org>
Authored: Tue Mar 7 15:37:24 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Tue Mar 7 15:37:24 2017 +0700
----------------------------------------------------------------------
solr/core/src/java/org/apache/solr/request/NumericFacets.java | 1 -
solr/core/src/java/org/apache/solr/schema/DatePointField.java | 1 -
.../solr/update/processor/ParsingFieldUpdateProcessorsTest.java | 1 -
3 files changed, 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/java/org/apache/solr/request/NumericFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
index c3bcb9f..fd17f1f 100644
--- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
@@ -52,7 +52,6 @@ import org.apache.solr.schema.TrieField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.util.DateMathParser;
/** Utility class to compute facets on numeric fields. */
final class NumericFacets {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/java/org/apache/solr/schema/DatePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
index 18bf651..b3517db 100644
--- a/solr/core/src/java/org/apache/solr/schema/DatePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
@@ -25,7 +25,6 @@ import java.util.Date;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
index 31f4760..3aeb1fb 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
@@ -18,7 +18,6 @@ package org.apache.solr.update.processor;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.PointField;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
[33/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9858: Collect
aggregated metrics from nodes and shard leaders in overseer.
Posted by is...@apache.org.
SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4d7bc947
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4d7bc947
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4d7bc947
Branch: refs/heads/jira/solr-6736
Commit: 4d7bc9477144937335e997ad630c4b89f558ddc5
Parents: a6e14ec
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Mar 7 22:00:38 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Mar 7 22:01:21 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +
.../org/apache/solr/cloud/ElectionContext.java | 5 +-
.../java/org/apache/solr/cloud/Overseer.java | 7 +-
.../solr/cloud/OverseerNodePrioritizer.java | 2 +-
.../solr/cloud/OverseerTaskProcessor.java | 6 +-
.../org/apache/solr/cloud/ZkController.java | 2 +-
.../org/apache/solr/core/CoreContainer.java | 30 +-
.../org/apache/solr/core/JmxMonitoredMap.java | 9 +-
.../src/java/org/apache/solr/core/SolrCore.java | 4 +-
.../org/apache/solr/core/SolrInfoMBean.java | 4 +-
.../org/apache/solr/core/SolrXmlConfig.java | 3 +-
.../handler/admin/MetricsCollectorHandler.java | 228 +++++++++++
.../solr/handler/admin/MetricsHandler.java | 2 +-
.../apache/solr/metrics/AggregateMetric.java | 200 ++++++++++
.../solr/metrics/SolrCoreMetricManager.java | 125 +++++-
.../apache/solr/metrics/SolrMetricManager.java | 325 ++++++++++++++-
.../metrics/reporters/JmxObjectNameFactory.java | 6 +-
.../reporters/solr/SolrClusterReporter.java | 277 +++++++++++++
.../metrics/reporters/solr/SolrReporter.java | 392 +++++++++++++++++++
.../reporters/solr/SolrShardReporter.java | 188 +++++++++
.../metrics/reporters/solr/package-info.java | 22 ++
.../java/org/apache/solr/update/PeerSync.java | 8 +-
.../org/apache/solr/util/stats/MetricUtils.java | 265 +++++++++----
.../src/test-files/solr/solr-solrreporter.xml | 66 ++++
.../apache/solr/cloud/TestCloudRecovery.java | 6 +-
.../apache/solr/core/TestJmxMonitoredMap.java | 2 +-
.../solr/metrics/SolrCoreMetricManagerTest.java | 31 +-
.../solr/metrics/SolrMetricManagerTest.java | 30 +-
.../metrics/SolrMetricsIntegrationTest.java | 15 +-
.../metrics/reporters/SolrJmxReporterTest.java | 13 +-
.../reporters/solr/SolrCloudReportersTest.java | 163 ++++++++
.../reporters/solr/SolrShardReporterTest.java | 117 ++++++
.../apache/solr/util/stats/MetricUtilsTest.java | 54 ++-
.../client/solrj/impl/BinaryRequestWriter.java | 4 +-
.../solr/client/solrj/io/SolrClientCache.java | 26 +-
.../client/solrj/request/TestCoreAdmin.java | 4 +-
36 files changed, 2435 insertions(+), 210 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index dc97456..0e78535 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -50,6 +50,10 @@ Upgrading from Solr 6.x
factors should be indexed in a separate field and combined with the query
score using a function query.
+New Features
+----------------------
+* SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
+
Bug Fixes
----------------------
* SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index ff6fb30..d3ad322 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -714,14 +714,13 @@ final class OverseerElectionContext extends ElectionContext {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrZkClient zkClient;
private Overseer overseer;
- public static final String OVERSEER_ELECT = "/overseer_elect";
public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
- super(zkNodeName, OVERSEER_ELECT, OVERSEER_ELECT + "/leader", null, zkClient);
+ super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
this.overseer = overseer;
this.zkClient = zkClient;
try {
- new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(OVERSEER_ELECT, zkClient);
+ new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
} catch (KeeperException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, e);
} catch (InterruptedException e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/Overseer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 3a8aa3e..61f15fc 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -65,7 +65,8 @@ public class Overseer implements Closeable {
public static final int STATE_UPDATE_DELAY = 1500; // delay between cloud state updates
public static final int NUM_RESPONSES_TO_STORE = 10000;
-
+ public static final String OVERSEER_ELECT = "/overseer_elect";
+
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
enum LeaderStatus {DONT_KNOW, NO, YES}
@@ -281,7 +282,7 @@ public class Overseer implements Closeable {
private void checkIfIamStillLeader() {
if (zkController != null && zkController.getCoreContainer().isShutDown()) return;//shutting down no need to go further
org.apache.zookeeper.data.Stat stat = new org.apache.zookeeper.data.Stat();
- String path = OverseerElectionContext.OVERSEER_ELECT + "/leader";
+ String path = OVERSEER_ELECT + "/leader";
byte[] data;
try {
data = zkClient.getData(path, null, stat, true);
@@ -394,7 +395,7 @@ public class Overseer implements Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkClient.getData(
- OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
+ OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
index 6512d26..798eca3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
@@ -65,7 +65,7 @@ public class OverseerNodePrioritizer {
String ldr = OverseerTaskProcessor.getLeaderNode(zk);
if(overseerDesignates.contains(ldr)) return;
log.info("prioritizing overseer nodes at {} overseer designates are {}", overseerId, overseerDesignates);
- List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
+ List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
if(electionNodes.size()<2) return;
log.info("sorted nodes {}", electionNodes);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index ad53346..bed71a6 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -337,7 +337,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static List<String> getSortedOverseerNodeNames(SolrZkClient zk) throws KeeperException, InterruptedException {
List<String> children = null;
try {
- children = zk.getChildren(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
+ children = zk.getChildren(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
} catch (Exception e) {
log.warn("error ", e);
return new ArrayList<>();
@@ -370,7 +370,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static String getLeaderId(SolrZkClient zkClient) throws KeeperException,InterruptedException{
byte[] data = null;
try {
- data = zkClient.getData(OverseerElectionContext.OVERSEER_ELECT + "/leader", null, new Stat(), true);
+ data = zkClient.getData(Overseer.OVERSEER_ELECT + "/leader", null, new Stat(), true);
} catch (KeeperException.NoNodeException e) {
return null;
}
@@ -384,7 +384,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkStateReader.getZkClient().getData(
- OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
+ Overseer.OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index c083736..333acd4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1715,7 +1715,7 @@ public class ZkController {
//however delete it . This is possible when the last attempt at deleting the election node failed.
if (electionNode.startsWith(getNodeName())) {
try {
- zkClient.delete(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
+ zkClient.delete(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
} catch (NoNodeException e) {
//no problem
} catch (InterruptedException e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index e3977d7..b9597ae 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -69,6 +69,7 @@ import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.ConfigSetsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.handler.admin.MetricsHandler;
import org.apache.solr.handler.admin.SecurityConfHandler;
import org.apache.solr.handler.admin.SecurityConfHandlerLocal;
@@ -177,6 +178,8 @@ public class CoreContainer {
protected MetricsHandler metricsHandler;
+ protected MetricsCollectorHandler metricsCollectorHandler;
+
private enum CoreInitFailedAction { fromleader, none }
/**
@@ -511,15 +514,18 @@ public class CoreContainer {
coreAdminHandler = createHandler(CORES_HANDLER_PATH, cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
configSetsHandler = createHandler(CONFIGSETS_HANDLER_PATH, cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class);
metricsHandler = createHandler(METRICS_PATH, MetricsHandler.class.getName(), MetricsHandler.class);
+ metricsCollectorHandler = createHandler(MetricsCollectorHandler.HANDLER_PATH, MetricsCollectorHandler.class.getName(), MetricsCollectorHandler.class);
+ // may want to add some configuration here in the future
+ metricsCollectorHandler.init(null);
containerHandlers.put(AUTHZ_PATH, securityConfHandler);
securityConfHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), AUTHZ_PATH);
containerHandlers.put(AUTHC_PATH, securityConfHandler);
if(pkiAuthenticationPlugin != null)
containerHandlers.put(PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.node);
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jvm);
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jetty);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.node);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jvm);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jetty);
coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);
@@ -537,6 +543,10 @@ public class CoreContainer {
metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
unloadedCores, true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+ if (isZooKeeperAware()) {
+ metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
+ }
+
// setup executor to load cores in parallel
ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
ExecutorUtil.newMDCAwareFixedThreadPool(
@@ -660,10 +670,16 @@ public class CoreContainer {
isShutDown = true;
ExecutorUtil.shutdownAndAwaitTermination(coreContainerWorkExecutor);
+ if (metricManager != null) {
+ metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
+ }
if (isZooKeeperAware()) {
cancelCoreRecoveries();
- zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
+ zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
+ if (metricManager != null) {
+ metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.cluster));
+ }
}
try {
@@ -722,10 +738,6 @@ public class CoreContainer {
}
}
- if (metricManager != null) {
- metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
- }
-
// It should be safe to close the authorization plugin at this point.
try {
if(authorizationPlugin != null) {
@@ -1232,7 +1244,7 @@ public class CoreContainer {
try (SolrCore core = getCore(name)) {
if (core != null) {
String oldRegistryName = core.getCoreMetricManager().getRegistryName();
- String newRegistryName = SolrCoreMetricManager.createRegistryName(core.getCoreDescriptor().getCollectionName(), toName);
+ String newRegistryName = SolrCoreMetricManager.createRegistryName(core, toName);
metricManager.swapRegistries(oldRegistryName, newRegistryName);
registerCore(toName, core, true, false);
SolrCore old = solrCores.remove(name);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
index b2a5c79..8bfa662 100644
--- a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
+++ b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
@@ -20,6 +20,7 @@ import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
+import javax.management.InstanceNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
@@ -53,7 +54,6 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig.JmxConfiguration;
-import org.apache.solr.metrics.SolrCoreMetricManager;
import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -93,9 +93,10 @@ public class JmxMonitoredMap<K, V> extends
private final String registryName;
- public JmxMonitoredMap(String coreName, String coreHashCode,
+ public JmxMonitoredMap(String coreName, String coreHashCode, String registryName,
final JmxConfiguration jmxConfig) {
this.coreHashCode = coreHashCode;
+ this.registryName = registryName;
jmxRootName = (null != jmxConfig.rootName ?
jmxConfig.rootName
: ("solr" + (null != coreName ? "/" + coreName : "")));
@@ -117,7 +118,6 @@ public class JmxMonitoredMap<K, V> extends
if (servers == null || servers.isEmpty()) {
server = null;
- registryName = null;
nameFactory = null;
log.debug("No JMX servers found, not exposing Solr information with JMX.");
return;
@@ -141,7 +141,6 @@ public class JmxMonitoredMap<K, V> extends
}
server = newServer;
}
- registryName = SolrCoreMetricManager.createRegistryName(null, coreName);
nameFactory = new JmxObjectNameFactory(REPORTER_NAME + coreHashCode, registryName);
}
@@ -166,6 +165,8 @@ public class JmxMonitoredMap<K, V> extends
for (ObjectName name : objectNames) {
try {
server.unregisterMBean(name);
+ } catch (InstanceNotFoundException ie) {
+ // ignore - someone else already deleted this one
} catch (Exception e) {
log.warn("Exception un-registering mbean {}", name, e);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index f22c472..13c3bdd 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -860,6 +860,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
this.configSetProperties = configSetProperties;
// Initialize the metrics manager
this.coreMetricManager = initCoreMetricManager(config);
+ this.coreMetricManager.loadReporters();
if (updateHandler == null) {
directoryFactory = initDirectoryFactory();
@@ -1101,13 +1102,12 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
*/
private SolrCoreMetricManager initCoreMetricManager(SolrConfig config) {
SolrCoreMetricManager coreMetricManager = new SolrCoreMetricManager(this);
- coreMetricManager.loadReporters();
return coreMetricManager;
}
private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
if (config.jmxConfig.enabled) {
- return new JmxMonitoredMap<String, SolrInfoMBean>(name, String.valueOf(this.hashCode()), config.jmxConfig);
+ return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
} else {
log.debug("JMX monitoring not detected for core: " + name);
return new ConcurrentHashMap<>();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
index bf77db4..63bdef0 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
@@ -36,9 +36,9 @@ public interface SolrInfoMBean {
SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }
/**
- * Top-level group of beans for a subsystem.
+ * Top-level group of beans or metrics for a subsystem.
*/
- enum Group { jvm, jetty, node, core }
+ enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }
/**
* Simple common usage name, e.g. BasicQueryHandler,
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index e41cd8d..951d8d5 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@ -451,7 +451,8 @@ public class SolrXmlConfig {
return new PluginInfo[0];
PluginInfo[] configs = new PluginInfo[nodes.getLength()];
for (int i = 0; i < nodes.getLength(); i++) {
- configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, true);
+ // we don't require class in order to support predefined replica and node reporter classes
+ configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
}
return configs;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
new file mode 100644
index 0000000..de39a61
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.handler.loader.ContentStreamLoader;
+import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.handler.loader.CSVLoader;
+import org.apache.solr.handler.loader.JavabinLoader;
+import org.apache.solr.handler.loader.JsonLoader;
+import org.apache.solr.handler.loader.XMLLoader;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.reporters.solr.SolrReporter;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.CommitUpdateCommand;
+import org.apache.solr.update.DeleteUpdateCommand;
+import org.apache.solr.update.MergeIndexesCommand;
+import org.apache.solr.update.RollbackUpdateCommand;
+import org.apache.solr.update.processor.UpdateRequestProcessor;
+import org.apache.solr.util.stats.MetricUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Handler to collect and aggregate metric reports. Each report indicates the target registry where
+ * metrics values should be collected and aggregated. Metrics with the same names are
+ * aggregated using {@link AggregateMetric} instances, which track the source of updates and
+ * their count, as well as providing simple statistics over collected values.
+ *
+ * Each report consists of {@link SolrInputDocument}-s that are expected to contain
+ * the following fields:
+ * <ul>
+ * <li>{@link SolrReporter#GROUP_ID} - (required) specifies target registry name where metrics will be grouped.</li>
+ * <li>{@link SolrReporter#REPORTER_ID} - (required) id of the reporter that sent this update. This can be eg.
+ * node name or replica name or other id that uniquely identifies the source of metrics values.</li>
+ * <li>{@link MetricUtils#METRIC_NAME} - (required) metric name (in the source registry)</li>
+ * <li>{@link SolrReporter#LABEL_ID} - (optional) label to prepend to metric names in the target registry.</li>
+ * <li>{@link SolrReporter#REGISTRY_ID} - (optional) name of the source registry.</li>
+ * </ul>
+ * Remaining fields are assumed to be single-valued, and to contain metric attributes and their values. Example:
+ * <pre>
+ * <doc>
+ * <field name="_group_">solr.core.collection1.shard1.leader</field>
+ * <field name="_reporter_">core_node3</field>
+ * <field name="metric">INDEX.merge.errors</field>
+ * <field name="value">0</field>
+ * </doc>
+ * </pre>
+ */
+public class MetricsCollectorHandler extends RequestHandlerBase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String HANDLER_PATH = "/admin/metrics/collector";
+
+ private final CoreContainer coreContainer;
+ private final SolrMetricManager metricManager;
+ private final Map<String, ContentStreamLoader> loaders = new HashMap<>();
+ private SolrParams params;
+
+ public MetricsCollectorHandler(final CoreContainer coreContainer) {
+ this.coreContainer = coreContainer;
+ this.metricManager = coreContainer.getMetricManager();
+
+ }
+
+ @Override
+ public void init(NamedList initArgs) {
+ super.init(initArgs);
+ if (initArgs != null) {
+ params = SolrParams.toSolrParams(initArgs);
+ } else {
+ params = new ModifiableSolrParams();
+ }
+ loaders.put("application/xml", new XMLLoader().init(params) );
+ loaders.put("application/json", new JsonLoader().init(params) );
+ loaders.put("application/csv", new CSVLoader().init(params) );
+ loaders.put("application/javabin", new JavabinLoader().init(params) );
+ loaders.put("text/csv", loaders.get("application/csv") );
+ loaders.put("text/xml", loaders.get("application/xml") );
+ loaders.put("text/json", loaders.get("application/json"));
+ }
+
+ @Override
+ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
+ if (coreContainer == null || coreContainer.isShutDown()) {
+ // silently drop request
+ return;
+ }
+ //log.info("#### " + req.toString());
+ if (req.getContentStreams() == null) { // no content
+ return;
+ }
+ for (ContentStream cs : req.getContentStreams()) {
+ if (cs.getContentType() == null) {
+ log.warn("Missing content type - ignoring");
+ continue;
+ }
+ ContentStreamLoader loader = loaders.get(cs.getContentType());
+ if (loader == null) {
+ throw new SolrException(SolrException.ErrorCode.UNSUPPORTED_MEDIA_TYPE, "Unsupported content type for stream: " + cs.getSourceInfo() + ", contentType=" + cs.getContentType());
+ }
+ loader.load(req, rsp, cs, new MetricUpdateProcessor(metricManager));
+ }
+ }
+
+ @Override
+ public String getDescription() {
+ return "Handler for collecting and aggregating metric reports.";
+ }
+
+ private static class MetricUpdateProcessor extends UpdateRequestProcessor {
+ private final SolrMetricManager metricManager;
+
+ public MetricUpdateProcessor(SolrMetricManager metricManager) {
+ super(null);
+ this.metricManager = metricManager;
+ }
+
+ @Override
+ public void processAdd(AddUpdateCommand cmd) throws IOException {
+ SolrInputDocument doc = cmd.solrDoc;
+ if (doc == null) {
+ return;
+ }
+ String metricName = (String)doc.getFieldValue(MetricUtils.METRIC_NAME);
+ if (metricName == null) {
+ log.warn("Missing " + MetricUtils.METRIC_NAME + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(MetricUtils.METRIC_NAME);
+ // XXX we could modify keys by using this original registry name
+ doc.remove(SolrReporter.REGISTRY_ID);
+ String groupId = (String)doc.getFieldValue(SolrReporter.GROUP_ID);
+ if (groupId == null) {
+ log.warn("Missing " + SolrReporter.GROUP_ID + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(SolrReporter.GROUP_ID);
+ String reporterId = (String)doc.getFieldValue(SolrReporter.REPORTER_ID);
+ if (reporterId == null) {
+ log.warn("Missing " + SolrReporter.REPORTER_ID + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(SolrReporter.REPORTER_ID);
+ String labelId = (String)doc.getFieldValue(SolrReporter.LABEL_ID);
+ doc.remove(SolrReporter.LABEL_ID);
+ doc.forEach(f -> {
+ String key = MetricRegistry.name(labelId, metricName, f.getName());
+ MetricRegistry registry = metricManager.registry(groupId);
+ AggregateMetric metric = getOrRegister(registry, key, new AggregateMetric());
+ Object o = f.getFirstValue();
+ if (o != null) {
+ metric.set(reporterId, o);
+ } else {
+ // remove missing values
+ metric.clear(reporterId);
+ }
+ });
+ }
+
+ private AggregateMetric getOrRegister(MetricRegistry registry, String name, AggregateMetric add) {
+ AggregateMetric existing = (AggregateMetric)registry.getMetrics().get(name);
+ if (existing != null) {
+ return existing;
+ }
+ try {
+ registry.register(name, add);
+ return add;
+ } catch (IllegalArgumentException e) {
+ // someone added before us
+ existing = (AggregateMetric)registry.getMetrics().get(name);
+ if (existing == null) { // now, that is weird...
+ throw new IllegalArgumentException("Inconsistent metric status, " + name);
+ }
+ return existing;
+ }
+ }
+
+ @Override
+ public void processDelete(DeleteUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processDelete");
+ }
+
+ @Override
+ public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processMergeIndexes");
+ }
+
+ @Override
+ public void processCommit(CommitUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processCommit");
+ }
+
+ @Override
+ public void processRollback(RollbackUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processRollback");
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index 385317b..b53c818 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -79,7 +79,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
NamedList response = new NamedList();
for (String registryName : requestedRegistries) {
MetricRegistry registry = metricManager.registry(registryName);
- response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter));
+ response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false, false, null));
}
rsp.getValues().add("metrics", response);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java b/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
new file mode 100644
index 0000000..babc99d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import com.codahale.metrics.Metric;
+
+/**
+ * This class is used for keeping several partial named values and providing useful statistics over them.
+ */
+public class AggregateMetric implements Metric {
+
+ /**
+ * Simple class to represent current value and how many times it was set.
+ */
+ public static class Update {
+ public Object value;
+ public final AtomicInteger updateCount = new AtomicInteger();
+
+ public Update(Object value) {
+ update(value);
+ }
+
+ public void update(Object value) {
+ this.value = value;
+ updateCount.incrementAndGet();
+ }
+
+ @Override
+ public String toString() {
+ return "Update{" +
+ "value=" + value +
+ ", updateCount=" + updateCount +
+ '}';
+ }
+ }
+
+ private final Map<String, Update> values = new ConcurrentHashMap<>();
+
+ public void set(String name, Object value) {
+ final Update existing = values.get(name);
+ if (existing == null) {
+ final Update created = new Update(value);
+ final Update raced = values.putIfAbsent(name, created);
+ if (raced != null) {
+ raced.update(value);
+ }
+ } else {
+ existing.update(value);
+ }
+ }
+
+ public void clear(String name) {
+ values.remove(name);
+ }
+
+ public void clear() {
+ values.clear();
+ }
+
+ public int size() {
+ return values.size();
+ }
+
+ public boolean isEmpty() {
+ return values.isEmpty();
+ }
+
+ public Map<String, Update> getValues() {
+ return Collections.unmodifiableMap(values);
+ }
+
+ // --------- stats ---------
+ public double getMax() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ Double res = null;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ if (res == null) {
+ res = n.doubleValue();
+ continue;
+ }
+ if (n.doubleValue() > res) {
+ res = n.doubleValue();
+ }
+ }
+ return res;
+ }
+
+ public double getMin() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ Double res = null;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ if (res == null) {
+ res = n.doubleValue();
+ continue;
+ }
+ if (n.doubleValue() < res) {
+ res = n.doubleValue();
+ }
+ }
+ return res;
+ }
+
+ public double getMean() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ double total = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ total += n.doubleValue();
+ }
+ return total / values.size();
+ }
+
+ public double getStdDev() {
+ int size = values.size();
+ if (size < 2) {
+ return 0;
+ }
+ final double mean = getMean();
+ double sum = 0;
+ int count = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ count++;
+ Number n = (Number)u.value;
+ final double diff = n.doubleValue() - mean;
+ sum += diff * diff;
+ }
+ if (count < 2) {
+ return 0;
+ }
+ final double variance = sum / (count - 1);
+ return Math.sqrt(variance);
+ }
+
+ public double getSum() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ double res = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ res += n.doubleValue();
+ }
+ return res;
+ }
+
+ @Override
+ public String toString() {
+ return "AggregateMetric{" +
+ "size=" + size() +
+ ", max=" + getMax() +
+ ", min=" + getMin() +
+ ", mean=" + getMean() +
+ ", stddev=" + getStdDev() +
+ ", sum=" + getSum() +
+ ", values=" + values +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
index eb5b687..43f3535 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
@@ -20,6 +20,7 @@ import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.core.NodeConfig;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
@@ -36,8 +37,14 @@ public class SolrCoreMetricManager implements Closeable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrCore core;
+ private final String tag;
private final SolrMetricManager metricManager;
private String registryName;
+ private String collectionName;
+ private String shardName;
+ private String replicaName;
+ private String leaderRegistryName;
+ private boolean cloudMode;
/**
* Constructs a metric manager.
@@ -46,8 +53,26 @@ public class SolrCoreMetricManager implements Closeable {
*/
public SolrCoreMetricManager(SolrCore core) {
this.core = core;
+ this.tag = String.valueOf(core.hashCode());
this.metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
- registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
+ initCloudMode();
+ registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+ leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
+ }
+
+ private void initCloudMode() {
+ CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
+ if (cd != null) {
+ cloudMode = true;
+ collectionName = core.getCoreDescriptor().getCollectionName();
+ shardName = cd.getShardId();
+ //replicaName = cd.getCoreNodeName();
+ String coreName = core.getName();
+ replicaName = parseReplicaName(collectionName, coreName);
+ if (replicaName == null) {
+ replicaName = cd.getCoreNodeName();
+ }
+ }
}
/**
@@ -57,7 +82,11 @@ public class SolrCoreMetricManager implements Closeable {
public void loadReporters() {
NodeConfig nodeConfig = core.getCoreDescriptor().getCoreContainer().getConfig();
PluginInfo[] pluginInfos = nodeConfig.getMetricReporterPlugins();
- metricManager.loadReporters(pluginInfos, core.getResourceLoader(), SolrInfoMBean.Group.core, registryName);
+ metricManager.loadReporters(pluginInfos, core.getResourceLoader(), tag,
+ SolrInfoMBean.Group.core, registryName);
+ if (cloudMode) {
+ metricManager.loadShardReporters(pluginInfos, core);
+ }
}
/**
@@ -67,12 +96,18 @@ public class SolrCoreMetricManager implements Closeable {
*/
public void afterCoreSetName() {
String oldRegistryName = registryName;
- registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
+ String oldLeaderRegistryName = leaderRegistryName;
+ initCloudMode();
+ registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+ leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
if (oldRegistryName.equals(registryName)) {
return;
}
// close old reporters
- metricManager.closeReporters(oldRegistryName);
+ metricManager.closeReporters(oldRegistryName, tag);
+ if (oldLeaderRegistryName != null) {
+ metricManager.closeReporters(oldLeaderRegistryName, tag);
+ }
// load reporters again, using the new core name
loadReporters();
}
@@ -96,7 +131,7 @@ public class SolrCoreMetricManager implements Closeable {
*/
@Override
public void close() throws IOException {
- metricManager.closeReporters(getRegistryName());
+ metricManager.closeReporters(getRegistryName(), tag);
}
public SolrCore getCore() {
@@ -104,7 +139,7 @@ public class SolrCoreMetricManager implements Closeable {
}
/**
- * Retrieves the metric registry name of the manager.
+ * Metric registry name of the manager.
*
* In order to make it easier for reporting tools to aggregate metrics from
* different cores that logically belong to a single collection we convert the
@@ -124,22 +159,74 @@ public class SolrCoreMetricManager implements Closeable {
return registryName;
}
- public static String createRegistryName(String collectionName, String coreName) {
- if (collectionName == null || (collectionName != null && !coreName.startsWith(collectionName + "_"))) {
- // single core, or unknown naming scheme
+ /**
+ * Metric registry name for leader metrics. This is null if not in cloud mode.
+ * @return metric registry name for leader metrics
+ */
+ public String getLeaderRegistryName() {
+ return leaderRegistryName;
+ }
+
+ /**
+ * Return a tag specific to this instance.
+ */
+ public String getTag() {
+ return tag;
+ }
+
+ public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
+ if (cloud) { // build registry name from logical names
+ return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shardName, replicaName);
+ } else {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, coreName);
}
- // split "collection1_shard1_1_replica1" into parts
- String str = coreName.substring(collectionName.length() + 1);
- String shard;
- String replica = null;
- int pos = str.lastIndexOf("_replica");
- if (pos == -1) { // ?? no _replicaN part ??
- shard = str;
+ }
+
+ /**
+ * This method is used by {@link org.apache.solr.core.CoreContainer#rename(String, String)}.
+ * @param aCore existing core with old name
+ * @param coreName new name
+ * @return new registry name
+ */
+ public static String createRegistryName(SolrCore aCore, String coreName) {
+ CloudDescriptor cd = aCore.getCoreDescriptor().getCloudDescriptor();
+ String replicaName = null;
+ if (cd != null) {
+ replicaName = parseReplicaName(cd.getCollectionName(), coreName);
+ }
+ return createRegistryName(
+ cd != null,
+ cd != null ? cd.getCollectionName() : null,
+ cd != null ? cd.getShardId() : null,
+ replicaName,
+ coreName
+ );
+ }
+
+ public static String parseReplicaName(String collectionName, String coreName) {
+ if (collectionName == null || !coreName.startsWith(collectionName)) {
+ return null;
+ } else {
+ // split "collection1_shard1_1_replica1" into parts
+ if (coreName.length() > collectionName.length()) {
+ String str = coreName.substring(collectionName.length() + 1);
+ int pos = str.lastIndexOf("_replica");
+ if (pos == -1) { // ?? no _replicaN part ??
+ return str;
+ } else {
+ return str.substring(pos + 1);
+ }
+ } else {
+ return null;
+ }
+ }
+ }
+
+ public static String createLeaderRegistryName(boolean cloud, String collectionName, String shardName) {
+ if (cloud) {
+ return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.collection, collectionName, shardName, "leader");
} else {
- shard = str.substring(0, pos);
- replica = str.substring(pos + 1);
+ return null;
}
- return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shard, replica);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index cac5389..3a4c3fe 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -18,9 +18,13 @@ package org.apache.solr.metrics;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@@ -29,6 +33,9 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
@@ -39,9 +46,14 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.MetricSet;
import com.codahale.metrics.SharedMetricRegistries;
import com.codahale.metrics.Timer;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;
+import org.apache.solr.metrics.reporters.solr.SolrShardReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -87,27 +99,39 @@ public class SolrMetricManager {
private final Lock reportersLock = new ReentrantLock();
private final Lock swapLock = new ReentrantLock();
+ public static final int DEFAULT_CLOUD_REPORTER_PERIOD = 60;
+
public SolrMetricManager() { }
/**
* An implementation of {@link MetricFilter} that selects metrics
- * with names that start with a prefix.
+ * with names that start with one of prefixes.
*/
public static class PrefixFilter implements MetricFilter {
- private final String[] prefixes;
+ private final Set<String> prefixes = new HashSet<>();
private final Set<String> matched = new HashSet<>();
private boolean allMatch = false;
/**
- * Create a filter that uses the provided prefix.
+ * Create a filter that uses the provided prefixes.
* @param prefixes prefixes to use, must not be null. If empty then any
* name will match, if not empty then match on any prefix will
* succeed (logical OR).
*/
public PrefixFilter(String... prefixes) {
Objects.requireNonNull(prefixes);
- this.prefixes = prefixes;
- if (prefixes.length == 0) {
+ if (prefixes.length > 0) {
+ this.prefixes.addAll(Arrays.asList(prefixes));
+ }
+ if (this.prefixes.isEmpty()) {
+ allMatch = true;
+ }
+ }
+
+ public PrefixFilter(Collection<String> prefixes) {
+ Objects.requireNonNull(prefixes);
+ this.prefixes.addAll(prefixes);
+ if (this.prefixes.isEmpty()) {
allMatch = true;
}
}
@@ -141,6 +165,85 @@ public class SolrMetricManager {
public void reset() {
matched.clear();
}
+
+ @Override
+ public String toString() {
+ return "PrefixFilter{" +
+ "prefixes=" + prefixes +
+ '}';
+ }
+ }
+
+ /**
+ * An implementation of {@link MetricFilter} that selects metrics
+ * with names that match regular expression patterns.
+ */
+ public static class RegexFilter implements MetricFilter {
+ private final Set<Pattern> compiledPatterns = new HashSet<>();
+ private final Set<String> matched = new HashSet<>();
+ private boolean allMatch = false;
+
+ /**
+ * Create a filter that uses the provided regex patterns.
+ * @param patterns regex patterns to use, must not be null. If empty then any
+ * name will match, if not empty then match on any pattern will
+ * succeed (logical OR).
+ */
+ public RegexFilter(String... patterns) throws PatternSyntaxException {
+ this(patterns != null ? Arrays.asList(patterns) : Collections.emptyList());
+ }
+
+ public RegexFilter(Collection<String> patterns) throws PatternSyntaxException {
+ Objects.requireNonNull(patterns);
+ if (patterns.isEmpty()) {
+ allMatch = true;
+ return;
+ }
+ patterns.forEach(p -> {
+ Pattern pattern = Pattern.compile(p);
+ compiledPatterns.add(pattern);
+ });
+ if (patterns.isEmpty()) {
+ allMatch = true;
+ }
+ }
+
+ @Override
+ public boolean matches(String name, Metric metric) {
+ if (allMatch) {
+ matched.add(name);
+ return true;
+ }
+ for (Pattern p : compiledPatterns) {
+ if (p.matcher(name).matches()) {
+ matched.add(name);
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Return the set of names that matched this filter.
+ * @return matching names
+ */
+ public Set<String> getMatched() {
+ return Collections.unmodifiableSet(matched);
+ }
+
+ /**
+ * Clear the set of names that matched.
+ */
+ public void reset() {
+ matched.clear();
+ }
+
+ @Override
+ public String toString() {
+ return "RegexFilter{" +
+ "compiledPatterns=" + compiledPatterns +
+ '}';
+ }
}
/**
@@ -150,7 +253,40 @@ public class SolrMetricManager {
Set<String> set = new HashSet<>();
set.addAll(registries.keySet());
set.addAll(SharedMetricRegistries.names());
- return Collections.unmodifiableSet(set);
+ return set;
+ }
+
+ /**
+ * Return set of existing registry names that match a regex pattern
+ * @param patterns regex patterns. NOTE: users need to make sure that patterns that
+ * don't start with a wildcard use the full registry name starting with
+ * {@link #REGISTRY_NAME_PREFIX}
+ * @return set of existing registry names where at least one pattern matched.
+ */
+ public Set<String> registryNames(String... patterns) throws PatternSyntaxException {
+ if (patterns == null || patterns.length == 0) {
+ return registryNames();
+ }
+ List<Pattern> compiled = new ArrayList<>();
+ for (String pattern : patterns) {
+ compiled.add(Pattern.compile(pattern));
+ }
+ return registryNames((Pattern[])compiled.toArray(new Pattern[compiled.size()]));
+ }
+
+ public Set<String> registryNames(Pattern... patterns) {
+ Set<String> allNames = registryNames();
+ if (patterns == null || patterns.length == 0) {
+ return allNames;
+ }
+ return allNames.stream().filter(s -> {
+ for (Pattern p : patterns) {
+ if (p.matcher(s).matches()) {
+ return true;
+ }
+ }
+ return false;
+ }).collect(Collectors.toSet());
}
/**
@@ -209,7 +345,7 @@ public class SolrMetricManager {
*/
public void removeRegistry(String registry) {
// close any reporters for this registry first
- closeReporters(registry);
+ closeReporters(registry, null);
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
if (isSharedRegistry(registry)) {
@@ -490,10 +626,12 @@ public class SolrMetricManager {
* the list. If both attributes are present then only "group" attribute will be processed.
* @param pluginInfos plugin configurations
* @param loader resource loader
+ * @param tag optional tag for the reporters, to distinguish reporters logically created for different parent
+ * component instances.
* @param group selected group, not null
* @param registryNames optional child registry name elements
*/
- public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, SolrInfoMBean.Group group, String... registryNames) {
+ public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoMBean.Group group, String... registryNames) {
if (pluginInfos == null || pluginInfos.length == 0) {
return;
}
@@ -533,7 +671,7 @@ public class SolrMetricManager {
}
}
try {
- loadReporter(registryName, loader, info);
+ loadReporter(registryName, loader, info, tag);
} catch (Exception e) {
log.warn("Error loading metrics reporter, plugin info: " + info, e);
}
@@ -545,9 +683,12 @@ public class SolrMetricManager {
* @param registry reporter is associated with this registry
* @param loader loader to use when creating an instance of the reporter
* @param pluginInfo plugin configuration. Plugin "name" and "class" attributes are required.
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
+ * @return instance of newly created and registered reporter
* @throws Exception if any argument is missing or invalid
*/
- public void loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo) throws Exception {
+ public SolrMetricReporter loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo, String tag) throws Exception {
if (registry == null || pluginInfo == null || pluginInfo.name == null || pluginInfo.className == null) {
throw new IllegalArgumentException("loadReporter called with missing arguments: " +
"registry=" + registry + ", loader=" + loader + ", pluginInfo=" + pluginInfo);
@@ -558,14 +699,19 @@ public class SolrMetricManager {
pluginInfo.className,
SolrMetricReporter.class,
new String[0],
- new Class[] { SolrMetricManager.class, String.class },
- new Object[] { this, registry }
+ new Class[]{SolrMetricManager.class, String.class},
+ new Object[]{this, registry}
);
try {
reporter.init(pluginInfo);
} catch (IllegalStateException e) {
throw new IllegalArgumentException("reporter init failed: " + pluginInfo, e);
}
+ registerReporter(registry, pluginInfo.name, tag, reporter);
+ return reporter;
+ }
+
+ private void registerReporter(String registry, String name, String tag, SolrMetricReporter reporter) throws Exception {
try {
if (!reportersLock.tryLock(10, TimeUnit.SECONDS)) {
throw new Exception("Could not obtain lock to modify reporters registry: " + registry);
@@ -579,12 +725,15 @@ public class SolrMetricManager {
perRegistry = new HashMap<>();
reporters.put(registry, perRegistry);
}
- SolrMetricReporter oldReporter = perRegistry.get(pluginInfo.name);
+ if (tag != null && !tag.isEmpty()) {
+ name = name + "@" + tag;
+ }
+ SolrMetricReporter oldReporter = perRegistry.get(name);
if (oldReporter != null) { // close it
- log.info("Replacing existing reporter '" + pluginInfo.name + "' in registry '" + registry + "': " + oldReporter.toString());
+ log.info("Replacing existing reporter '" + name + "' in registry '" + registry + "': " + oldReporter.toString());
oldReporter.close();
}
- perRegistry.put(pluginInfo.name, reporter);
+ perRegistry.put(name, reporter);
} finally {
reportersLock.unlock();
@@ -595,9 +744,11 @@ public class SolrMetricManager {
* Close and unregister a named {@link SolrMetricReporter} for a registry.
* @param registry registry name
* @param name reporter name
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
* @return true if a named reporter existed and was closed.
*/
- public boolean closeReporter(String registry, String name) {
+ public boolean closeReporter(String registry, String name, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@@ -614,6 +765,9 @@ public class SolrMetricManager {
if (perRegistry == null) {
return false;
}
+ if (tag != null && !tag.isEmpty()) {
+ name = name + "@" + tag;
+ }
SolrMetricReporter reporter = perRegistry.remove(name);
if (reporter == null) {
return false;
@@ -635,6 +789,17 @@ public class SolrMetricManager {
* @return names of closed reporters
*/
public Set<String> closeReporters(String registry) {
+ return closeReporters(registry, null);
+ }
+
+ /**
+ * Close and unregister all {@link SolrMetricReporter}-s for a registry.
+ * @param registry registry name
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
+ * @return names of closed reporters
+ */
+ public Set<String> closeReporters(String registry, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@@ -646,18 +811,28 @@ public class SolrMetricManager {
log.warn("Interrupted while trying to obtain lock to modify reporters registry: " + registry);
return Collections.emptySet();
}
- log.info("Closing metric reporters for: " + registry);
+ log.info("Closing metric reporters for registry=" + registry + ", tag=" + tag);
try {
- Map<String, SolrMetricReporter> perRegistry = reporters.remove(registry);
+ Map<String, SolrMetricReporter> perRegistry = reporters.get(registry);
if (perRegistry != null) {
- for (SolrMetricReporter reporter : perRegistry.values()) {
+ Set<String> names = new HashSet<>(perRegistry.keySet());
+ Set<String> removed = new HashSet<>();
+ names.forEach(name -> {
+ if (tag != null && !tag.isEmpty() && !name.endsWith("@" + tag)) {
+ return;
+ }
+ SolrMetricReporter reporter = perRegistry.remove(name);
try {
reporter.close();
} catch (IOException ioe) {
log.warn("Exception closing reporter " + reporter, ioe);
}
+ removed.add(name);
+ });
+ if (removed.size() == names.size()) {
+ reporters.remove(registry);
}
- return perRegistry.keySet();
+ return removed;
} else {
return Collections.emptySet();
}
@@ -695,4 +870,114 @@ public class SolrMetricManager {
reportersLock.unlock();
}
}
+
+ private List<PluginInfo> prepareCloudPlugins(PluginInfo[] pluginInfos, String group, String className,
+ Map<String, String> defaultAttributes,
+ Map<String, Object> defaultInitArgs,
+ PluginInfo defaultPlugin) {
+ List<PluginInfo> result = new ArrayList<>();
+ if (pluginInfos == null) {
+ pluginInfos = new PluginInfo[0];
+ }
+ for (PluginInfo info : pluginInfos) {
+ String groupAttr = info.attributes.get("group");
+ if (!group.equals(groupAttr)) {
+ continue;
+ }
+ info = preparePlugin(info, className, defaultAttributes, defaultInitArgs);
+ if (info != null) {
+ result.add(info);
+ }
+ }
+ if (result.isEmpty() && defaultPlugin != null) {
+ defaultPlugin = preparePlugin(defaultPlugin, className, defaultAttributes, defaultInitArgs);
+ if (defaultPlugin != null) {
+ result.add(defaultPlugin);
+ }
+ }
+ return result;
+ }
+
+ private PluginInfo preparePlugin(PluginInfo info, String className, Map<String, String> defaultAttributes,
+ Map<String, Object> defaultInitArgs) {
+ if (info == null) {
+ return null;
+ }
+ String classNameAttr = info.attributes.get("class");
+ if (className != null) {
+ if (classNameAttr != null && !className.equals(classNameAttr)) {
+ log.warn("Conflicting class name attributes, expected " + className + " but was " + classNameAttr + ", skipping " + info);
+ return null;
+ }
+ }
+
+ Map<String, String> attrs = new HashMap<>(info.attributes);
+ defaultAttributes.forEach((k, v) -> {
+ if (!attrs.containsKey(k)) {
+ attrs.put(k, v);
+ }
+ });
+ attrs.put("class", className);
+ Map<String, Object> initArgs = new HashMap<>();
+ if (info.initArgs != null) {
+ initArgs.putAll(info.initArgs.asMap(10));
+ }
+ defaultInitArgs.forEach((k, v) -> {
+ if (!initArgs.containsKey(k)) {
+ initArgs.put(k, v);
+ }
+ });
+ return new PluginInfo(info.type, attrs, new NamedList(initArgs), null);
+ }
+
+ public void loadShardReporters(PluginInfo[] pluginInfos, SolrCore core) {
+ // don't load for non-cloud cores
+ if (core.getCoreDescriptor().getCloudDescriptor() == null) {
+ return;
+ }
+ // prepare default plugin if none present in the config
+ Map<String, String> attrs = new HashMap<>();
+ attrs.put("name", "shardDefault");
+ attrs.put("group", SolrInfoMBean.Group.shard.toString());
+ Map<String, Object> initArgs = new HashMap<>();
+ initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
+
+ String registryName = core.getCoreMetricManager().getRegistryName();
+ // collect infos and normalize
+ List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.shard.toString(), SolrShardReporter.class.getName(),
+ attrs, initArgs, null);
+ for (PluginInfo info : infos) {
+ try {
+ SolrMetricReporter reporter = loadReporter(registryName, core.getResourceLoader(), info,
+ String.valueOf(core.hashCode()));
+ ((SolrShardReporter)reporter).setCore(core);
+ } catch (Exception e) {
+ log.warn("Could not load shard reporter, pluginInfo=" + info, e);
+ }
+ }
+ }
+
+ public void loadClusterReporters(PluginInfo[] pluginInfos, CoreContainer cc) {
+ // don't load for non-cloud instances
+ if (!cc.isZooKeeperAware()) {
+ return;
+ }
+ Map<String, String> attrs = new HashMap<>();
+ attrs.put("name", "clusterDefault");
+ attrs.put("group", SolrInfoMBean.Group.cluster.toString());
+ Map<String, Object> initArgs = new HashMap<>();
+ initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
+ List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
+ attrs, initArgs, null);
+ String registryName = getRegistryName(SolrInfoMBean.Group.cluster);
+ for (PluginInfo info : infos) {
+ try {
+ SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);
+ ((SolrClusterReporter)reporter).setCoreContainer(cc);
+ } catch (Exception e) {
+ log.warn("Could not load node reporter, pluginInfo=" + info, e);
+ }
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
index 4df5257..1f5b4f0 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
@@ -41,9 +41,9 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
* @param additionalProperties additional properties as key, value pairs.
*/
public JmxObjectNameFactory(String reporterName, String domain, String... additionalProperties) {
- this.reporterName = reporterName;
+ this.reporterName = reporterName.replaceAll(":", "_");
this.domain = domain;
- this.subdomains = domain.split("\\.");
+ this.subdomains = domain.replaceAll(":", "_").split("\\.");
if (additionalProperties != null && (additionalProperties.length % 2) != 0) {
throw new IllegalArgumentException("additionalProperties length must be even: " + Arrays.toString(additionalProperties));
}
@@ -83,7 +83,7 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
}
sb.append(','); // separate from other properties
} else {
- sb.append(currentDomain);
+ sb.append(currentDomain.replaceAll(":", "_"));
sb.append(':');
}
} else {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
new file mode 100644
index 0000000..846e805
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import org.apache.http.client.HttpClient;
+import org.apache.solr.cloud.Overseer;
+import org.apache.solr.cloud.ZkController;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This reporter sends selected metrics from local registries to {@link Overseer}.
+ * <p>The following configuration properties are supported:</p>
+ * <ul>
+ * <li>handler - (optional str) handler path where reports are sent. Default is
+ * {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
+ * <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
+ * to 0 disables the reporter.</li>
+ * <li>report - (optional multiple lst) report configuration(s), see below.</li>
+ * </ul>
+ * Each report configuration consist of the following properties:
+ * <ul>
+ * <li>registry - (required str) regex pattern matching source registries (see {@link SolrMetricManager#registryNames(String...)}),
+ * may contain capture groups.</li>
+ * <li>group - (required str) target registry name where metrics will be grouped. This can be a regex pattern that
+ * contains back-references to capture groups collected by <code>registry</code> pattern</li>
+ * <li>label - (optional str) optional prefix to prepend to metric names, may contain back-references to
+ * capture groups collected by <code>registry</code> pattern</li>
+ * <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
+ * </ul>
+ * NOTE: this reporter uses a predefined "cluster" group, and it's always created even if explicit configuration
+ * is missing. Default configuration uses report specifications from {@link #DEFAULT_REPORTS}.
+ * <p>Example configuration:</p>
+ * <pre>
+ * <reporter name="test" group="overseer">
+ * <str name="handler">/admin/metrics/collector</str>
+ * <int name="period">11</int>
+ * <lst name="report">
+ * <str name="group">overseer</str>
+ * <str name="label">jvm</str>
+ * <str name="registry">solr\.jvm</str>
+ * <str name="filter">memory\.total\..*</str>
+ * <str name="filter">memory\.heap\..*</str>
+ * <str name="filter">os\.SystemLoadAverage</str>
+ * <str name="filter">threads\.count</str>
+ * </lst>
+ * <lst name="report">
+ * <str name="group">overseer</str>
+ * <str name="label">leader.$1</str>
+ * <str name="registry">solr\.core\.(.*)\.leader</str>
+ * <str name="filter">UPDATE\./update/.*</str>
+ * </lst>
+ * </reporter>
+ * </pre>
+ *
+ */
+public class SolrClusterReporter extends SolrMetricReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.cluster.toString());
+
+ public static final List<SolrReporter.Report> DEFAULT_REPORTS = new ArrayList<SolrReporter.Report>() {{
+ add(new SolrReporter.Report(CLUSTER_GROUP, "jetty",
+ SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jetty.toString()),
+ Collections.emptySet())); // all metrics
+ add(new SolrReporter.Report(CLUSTER_GROUP, "jvm",
+ SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jvm.toString()),
+ new HashSet<String>() {{
+ add("memory\\.total\\..*");
+ add("memory\\.heap\\..*");
+ add("os\\.SystemLoadAverage");
+ add("os\\.FreePhysicalMemorySize");
+ add("os\\.FreeSwapSpaceSize");
+ add("os\\.OpenFileDescriptorCount");
+ add("threads\\.count");
+ }})); // selected memory, OS and thread metrics
+ // XXX anything interesting here?
+ //add(new SolrReporter.Specification(OVERSEER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.node.toString()),
+ // Collections.emptySet())); // all metrics
+ add(new SolrReporter.Report(CLUSTER_GROUP, "leader.$1", "solr\\.collection\\.(.*)\\.leader",
+ new HashSet<String>(){{
+ add("UPDATE\\./update/.*");
+ add("QUERY\\./select.*");
+ add("INDEX\\..*");
+ add("TLOG\\..*");
+ }}));
+ }};
+
+ private String handler = MetricsCollectorHandler.HANDLER_PATH;
+ private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
+ private List<SolrReporter.Report> reports = new ArrayList<>();
+
+ private SolrReporter reporter;
+
+ /**
+ * Create a reporter for metrics managed in a named registry.
+ *
+ * @param metricManager metric manager
+ * @param registryName this is ignored
+ */
+ public SolrClusterReporter(SolrMetricManager metricManager, String registryName) {
+ super(metricManager, registryName);
+ }
+
+ public void setHandler(String handler) {
+ this.handler = handler;
+ }
+
+ public void setPeriod(int period) {
+ this.period = period;
+ }
+
+ public void setReport(List<Map> reportConfig) {
+ if (reportConfig == null || reportConfig.isEmpty()) {
+ return;
+ }
+ reportConfig.forEach(map -> {
+ SolrReporter.Report r = SolrReporter.Report.fromMap(map);
+ if (r != null) {
+ reports.add(r);
+ }
+ });
+ }
+
+ // for unit tests
+ int getPeriod() {
+ return period;
+ }
+
+ List<SolrReporter.Report> getReports() {
+ return reports;
+ }
+
+ @Override
+ protected void validate() throws IllegalStateException {
+ if (period < 1) {
+ log.info("Turning off node reporter, period=" + period);
+ }
+ if (reports.isEmpty()) { // set defaults
+ reports = DEFAULT_REPORTS;
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (reporter != null) {
+ reporter.close();;
+ }
+ }
+
+ public void setCoreContainer(CoreContainer cc) {
+ if (reporter != null) {
+ reporter.close();;
+ }
+ // start reporter only in cloud mode
+ if (!cc.isZooKeeperAware()) {
+ log.warn("Not ZK-aware, not starting...");
+ return;
+ }
+ if (period < 1) { // don't start it
+ return;
+ }
+ HttpClient httpClient = cc.getUpdateShardHandler().getHttpClient();
+ ZkController zk = cc.getZkController();
+ String reporterId = zk.getNodeName();
+ reporter = SolrReporter.Builder.forReports(metricManager, reports)
+ .convertRatesTo(TimeUnit.SECONDS)
+ .convertDurationsTo(TimeUnit.MILLISECONDS)
+ .withHandler(handler)
+ .withReporterId(reporterId)
+ .cloudClient(false) // we want to send reports specifically to a selected leader instance
+ .skipAggregateValues(true) // we don't want to transport details of aggregates
+ .skipHistograms(true) // we don't want to transport histograms
+ .build(httpClient, new OverseerUrlSupplier(zk));
+
+ reporter.start(period, TimeUnit.SECONDS);
+ }
+
+ // TODO: fix this when there is an elegant way to retrieve URL of a node that runs Overseer leader.
+ // package visibility for unit tests
+ static class OverseerUrlSupplier implements Supplier<String> {
+ private static final long DEFAULT_INTERVAL = 30000000; // 30s
+ private ZkController zk;
+ private String lastKnownUrl = null;
+ private long lastCheckTime = 0;
+ private long interval = DEFAULT_INTERVAL;
+
+ OverseerUrlSupplier(ZkController zk) {
+ this.zk = zk;
+ }
+
+ @Override
+ public String get() {
+ if (zk == null) {
+ return null;
+ }
+ // primitive caching for lastKnownUrl
+ long now = System.nanoTime();
+ if (lastKnownUrl != null && (now - lastCheckTime) < interval) {
+ return lastKnownUrl;
+ }
+ if (!zk.isConnected()) {
+ return lastKnownUrl;
+ }
+ lastCheckTime = now;
+ SolrZkClient zkClient = zk.getZkClient();
+ ZkNodeProps props;
+ try {
+ props = ZkNodeProps.load(zkClient.getData(
+ Overseer.OVERSEER_ELECT + "/leader", null, null, true));
+ } catch (KeeperException e) {
+ log.warn("Could not obtain overseer's address, skipping.", e);
+ return lastKnownUrl;
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ return lastKnownUrl;
+ }
+ if (props == null) {
+ return lastKnownUrl;
+ }
+ String oid = props.getStr("id");
+ if (oid == null) {
+ return lastKnownUrl;
+ }
+ String[] ids = oid.split("-");
+ if (ids.length != 3) { // unknown format
+ log.warn("Unknown format of leader id, skipping: " + oid);
+ return lastKnownUrl;
+ }
+ // convert nodeName back to URL
+ String url = zk.getZkStateReader().getBaseUrlForNodeName(ids[1]);
+ // check that it's parseable
+ try {
+ new java.net.URL(url);
+ } catch (MalformedURLException mue) {
+ log.warn("Malformed Overseer's leader URL: url", mue);
+ return lastKnownUrl;
+ }
+ lastKnownUrl = url;
+ return url;
+ }
+ }
+
+}
[26/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10178,
SOLR-10079: Force tests to always use NoMergePolicy,
also assert that it was used
Posted by is...@apache.org.
SOLR-10178, SOLR-10079: Force tests to always use NoMergePolicy, also assert that it was used
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/190f4b6b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/190f4b6b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/190f4b6b
Branch: refs/heads/jira/solr-6736
Commit: 190f4b6b935d39d5c08b9a23a07c9c891d197312
Parents: 21559fe
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Tue Mar 7 19:02:26 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Tue Mar 7 19:02:26 2017 +0530
----------------------------------------------------------------------
.../solr/update/TestInPlaceUpdatesDistrib.java | 19 ++++++++++++++++++-
1 file changed, 18 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/190f4b6b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index b107cbd..4c90bc6 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -30,6 +30,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.client.solrj.SolrClient;
@@ -55,6 +57,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.index.NoMergePolicyFactory;
import org.apache.solr.update.processor.DistributedUpdateProcessor;
import org.apache.solr.util.DefaultSolrThreadFactory;
+import org.apache.solr.util.RefCounted;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.BeforeClass;
@@ -82,7 +85,11 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
// we need consistent segments that aren't re-ordered on merge because we're
// asserting inplace updates happen by checking the internal [docid]
systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());
-
+
+ // HACK: Don't use a RandomMergePolicy, but only use the mergePolicyFactory that we've just set
+ System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICYFACTORY, "true");
+ System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICY, "false");
+
initCore(configString, schemaString);
// sanity check that autocommits are disabled
@@ -90,6 +97,16 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxTime);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxDocs);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxDocs);
+
+ // assert that NoMergePolicy was chosen
+ RefCounted<IndexWriter> iw = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
+ try {
+ IndexWriter writer = iw.get();
+ assertTrue("Actual merge policy is: " + writer.getConfig().getMergePolicy(),
+ writer.getConfig().getMergePolicy() instanceof NoMergePolicy);
+ } finally {
+ iw.decref();
+ }
}
@After
[23/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9986: Implement
DatePointField
Posted by is...@apache.org.
SOLR-9986: Implement DatePointField
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3131ec2d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3131ec2d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3131ec2d
Branch: refs/heads/jira/solr-6736
Commit: 3131ec2d99401c1fd1fc33a00343a59a78ab6445
Parents: 6df17c8
Author: Cao Manh Dat <da...@apache.org>
Authored: Tue Mar 7 10:11:47 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Tue Mar 7 10:11:47 2017 +0700
----------------------------------------------------------------------
solr/CHANGES.txt | 2 +
.../handler/component/RangeFacetRequest.java | 5 +-
.../component/SortedDateStatsValues.java | 89 +++
.../handler/component/StatsValuesFactory.java | 8 +-
.../org/apache/solr/request/NumericFacets.java | 5 +-
.../org/apache/solr/response/DocsStreamer.java | 2 +
.../org/apache/solr/schema/DatePointField.java | 219 ++++++++
.../apache/solr/search/SolrIndexSearcher.java | 2 +-
.../conf/schema-docValuesFaceting.xml | 5 +-
.../solr/collection1/conf/schema-point.xml | 11 +
.../solr/collection1/conf/schema-sorts.xml | 11 +-
.../test-files/solr/collection1/conf/schema.xml | 11 +-
.../solr/collection1/conf/schema11.xml | 3 +-
.../solr/collection1/conf/schema12.xml | 9 +-
.../solr/collection1/conf/schema_latest.xml | 13 +-
...lrconfig-parsing-update-processor-chains.xml | 7 +
.../conf/solrconfig-update-processor-chains.xml | 2 +
.../handler/admin/LukeRequestHandlerTest.java | 4 +-
.../org/apache/solr/schema/DateFieldTest.java | 10 +-
.../org/apache/solr/schema/TestPointFields.java | 561 ++++++++++++++++++-
.../apache/solr/search/TestSolrQueryParser.java | 12 +-
.../update/processor/AtomicUpdatesTest.java | 25 +-
.../ParsingFieldUpdateProcessorsTest.java | 11 +-
.../java/org/apache/solr/SolrTestCaseJ4.java | 2 +
24 files changed, 966 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4cfcb72..db721da 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -154,6 +154,8 @@ New Features
* SOLR-9999: Instrument DirectUpdateHandler2. This registers existing statistics under metrics API and adds
more metrics to track the rates of update and delete commands. (ab)
+* SOLR-9986: Implement DatePointField (Cao Manh Dat, Tomás Fernández Löbbe)
+
Bug Fixes
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
index aa3e3cb..3ac7300 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
@@ -31,6 +31,7 @@ import org.apache.solr.common.params.RequiredSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.schema.DatePointField;
import org.apache.solr.schema.DateRangeField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
@@ -725,7 +726,9 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
final Date now) {
super(rangeFacetRequest);
this.now = now;
- if (!(field.getType() instanceof TrieDateField) && !(field.getType() instanceof DateRangeField)) {
+ if (!(field.getType() instanceof TrieDateField)
+ && !(field.getType() instanceof DateRangeField)
+ && !(field.getType() instanceof DatePointField)) {
throw new IllegalArgumentException(TYPE_ERR_MSG);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java b/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java
new file mode 100644
index 0000000..0df45c7
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/component/SortedDateStatsValues.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.component;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Map;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.apache.solr.common.util.NamedList;
+
+public class SortedDateStatsValues implements StatsValues {
+
+ private final DateStatsValues dsv;
+ private final String fieldName;
+ private SortedNumericDocValues sndv;
+
+
+ public SortedDateStatsValues(DateStatsValues dsv, StatsField field) {
+ this.dsv = dsv;
+ this.fieldName = field.getSchemaField().getName();
+ }
+
+ @Override
+ public void accumulate(NamedList stv) {
+ dsv.accumulate(stv);
+ }
+
+ @Override
+ public void accumulate(int docId) throws IOException {
+ if (!sndv.advanceExact(docId)) {
+ missing();
+ } else {
+ for (int i = 0 ; i < sndv.docValueCount(); i++) {
+ dsv.accumulate(new Date(sndv.nextValue()), 1);
+ }
+ }
+
+ }
+
+ @Override
+ public void accumulate(BytesRef value, int count) {
+ dsv.accumulate(value, count);
+ }
+
+ @Override
+ public void missing() {
+ dsv.missing();
+ }
+
+ @Override
+ public void addMissing(int count) {
+ dsv.addMissing(count);
+ }
+
+ @Override
+ public void addFacet(String facetName, Map<String,StatsValues> facetValues) {
+ dsv.addFacet(facetName, facetValues);
+ }
+
+ @Override
+ public NamedList<?> getStatsValues() {
+ return dsv.getStatsValues();
+ }
+
+ @Override
+ public void setNextReader(LeafReaderContext ctx) throws IOException {
+ sndv = DocValues.getSortedNumeric(ctx.reader(), fieldName);
+ assert sndv != null;
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
index 2a6e795..d39ada2 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
@@ -63,8 +63,12 @@ public class StatsValuesFactory {
final FieldType fieldType = sf.getType(); // TODO: allow FieldType to provide impl.
- if (TrieDateField.class.isInstance(fieldType)) {
- return new DateStatsValues(statsField);
+ if (TrieDateField.class.isInstance(fieldType) || DatePointField.class.isInstance(fieldType)) {
+ DateStatsValues statsValues = new DateStatsValues(statsField);
+ if (sf.multiValued()) {
+ return new SortedDateStatsValues(statsValues, statsField);
+ }
+ return statsValues;
} else if (TrieField.class.isInstance(fieldType) || PointField.class.isInstance(fieldType)) {
NumericStatsValues statsValue = new NumericStatsValues(statsField);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/request/NumericFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
index a72eeee..c3bcb9f 100644
--- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
@@ -19,6 +19,7 @@ package org.apache.solr.request;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Collections;
+import java.util.Date;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
@@ -51,6 +52,7 @@ import org.apache.solr.schema.TrieField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.DateMathParser;
/** Utility class to compute facets on numeric fields. */
final class NumericFacets {
@@ -512,7 +514,8 @@ final class NumericFacets {
return String.valueOf(NumericUtils.sortableIntToFloat((int)bits));
case DOUBLE:
return String.valueOf(NumericUtils.sortableLongToDouble(bits));
- //TODO: DATE
+ case DATE:
+ return new Date(bits).toInstant().toString();
default:
throw new AssertionError("Unsupported NumberType: " + fieldType.getNumberType());
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/response/DocsStreamer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/DocsStreamer.java b/solr/core/src/java/org/apache/solr/response/DocsStreamer.java
index ef0b0c7..bdea9ec 100644
--- a/solr/core/src/java/org/apache/solr/response/DocsStreamer.java
+++ b/solr/core/src/java/org/apache/solr/response/DocsStreamer.java
@@ -31,6 +31,7 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.schema.BinaryField;
import org.apache.solr.schema.BoolField;
+import org.apache.solr.schema.DatePointField;
import org.apache.solr.schema.DoublePointField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.FloatPointField;
@@ -221,6 +222,7 @@ public class DocsStreamer implements Iterator<SolrDocument> {
KNOWN_TYPES.add(LongPointField.class);
KNOWN_TYPES.add(DoublePointField.class);
KNOWN_TYPES.add(FloatPointField.class);
+ KNOWN_TYPES.add(DatePointField.class);
// We do not add UUIDField because UUID object is not a supported type in JavaBinCodec
// and if we write UUIDField.toObject, we wouldn't know how to handle it in the client side
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/schema/DatePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
new file mode 100644
index 0000000..18bf651
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.schema;
+
+import java.lang.invoke.MethodHandles;
+import java.time.Instant;
+import java.util.Collection;
+import java.util.Date;
+
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericType;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.valuesource.LongFieldSource;
+import org.apache.lucene.queries.function.valuesource.MultiValuedLongFieldSource;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.mutable.MutableValueDate;
+import org.apache.lucene.util.mutable.MutableValueLong;
+import org.apache.solr.search.QParser;
+import org.apache.solr.uninverting.UninvertingReader;
+import org.apache.solr.util.DateMathParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DatePointField extends PointField implements DateValueFieldType {
+
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public DatePointField() {
+ type = NumberType.DATE;
+ }
+
+
+ @Override
+ public Object toNativeType(Object val) {
+ if (val instanceof String) {
+ return DateMathParser.parseMath(null, (String) val);
+ }
+ return super.toNativeType(val);
+ }
+
+ @Override
+ public Query getPointRangeQuery(QParser parser, SchemaField field, String min, String max, boolean minInclusive, boolean maxInclusive) {
+ long actualMin, actualMax;
+ if (min == null) {
+ actualMin = Long.MIN_VALUE;
+ } else {
+ actualMin = DateMathParser.parseMath(null, min).getTime();
+ if (!minInclusive) {
+ actualMin++;
+ }
+ }
+ if (max == null) {
+ actualMax = Long.MAX_VALUE;
+ } else {
+ actualMax = DateMathParser.parseMath(null, max).getTime();
+ if (!maxInclusive) {
+ actualMax--;
+ }
+ }
+ return LongPoint.newRangeQuery(field.getName(), actualMin, actualMax);
+ }
+
+ @Override
+ public Object toObject(SchemaField sf, BytesRef term) {
+ return new Date(LongPoint.decodeDimension(term.bytes, term.offset));
+ }
+
+ @Override
+ public Object toObject(IndexableField f) {
+ final Number val = f.numericValue();
+ if (val != null) {
+ return new Date(val.longValue());
+ } else {
+ throw new AssertionError("Unexpected state. Field: '" + f + "'");
+ }
+ }
+
+ @Override
+ protected Query getExactQuery(SchemaField field, String externalVal) {
+ return LongPoint.newExactQuery(field.getName(), DateMathParser.parseMath(null, externalVal).getTime());
+ }
+
+ @Override
+ public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVals) {
+ assert externalVals.size() > 0;
+ long[] values = new long[externalVals.size()];
+ int i = 0;
+ for (String val:externalVals) {
+ values[i] = DateMathParser.parseMath(null, val).getTime();
+ i++;
+ }
+ return LongPoint.newSetQuery(field.getName(), values);
+ }
+
+ @Override
+ protected String indexedToReadable(BytesRef indexedForm) {
+ return Instant.ofEpochMilli(LongPoint.decodeDimension(indexedForm.bytes, indexedForm.offset)).toString();
+ }
+
+ @Override
+ public void readableToIndexed(CharSequence val, BytesRefBuilder result) {
+ Date date = (Date) toNativeType(val.toString());
+ result.grow(Long.BYTES);
+ result.setLength(Long.BYTES);
+ LongPoint.encodeDimension(date.getTime(), result.bytes(), 0);
+ }
+
+ @Override
+ public SortField getSortField(SchemaField field, boolean top) {
+ field.checkSortability();
+
+ Object missingValue = null;
+ boolean sortMissingLast = field.sortMissingLast();
+ boolean sortMissingFirst = field.sortMissingFirst();
+
+ if (sortMissingLast) {
+ missingValue = top ? Long.MIN_VALUE : Long.MAX_VALUE;
+ } else if (sortMissingFirst) {
+ missingValue = top ? Long.MAX_VALUE : Long.MIN_VALUE;
+ }
+ SortField sf = new SortField(field.getName(), SortField.Type.LONG, top);
+ sf.setMissingValue(missingValue);
+ return sf;
+ }
+
+ @Override
+ public UninvertingReader.Type getUninversionType(SchemaField sf) {
+ if (sf.multiValued()) {
+ return UninvertingReader.Type.SORTED_LONG;
+ } else {
+ return UninvertingReader.Type.LONG_POINT;
+ }
+ }
+
+ @Override
+ public ValueSource getValueSource(SchemaField field, QParser parser) {
+ field.checkFieldCacheSource();
+ return new DatePointFieldSource(field.getName());
+ }
+
+ @Override
+ protected ValueSource getSingleValueSource(SortedNumericSelector.Type choice, SchemaField field) {
+ return new MultiValuedLongFieldSource(field.getName(), choice);
+ }
+
+ @Override
+ public LegacyNumericType getNumericType() {
+ return LegacyNumericType.LONG;
+ }
+
+ @Override
+ public IndexableField createField(SchemaField field, Object value) {
+ if (!isFieldUsed(field)) return null;
+
+ Date date = (value instanceof Date)
+ ? ((Date)value)
+ : DateMathParser.parseMath(null, value.toString());
+ return new LongPoint(field.getName(), date.getTime());
+ }
+
+ @Override
+ protected StoredField getStoredField(SchemaField sf, Object value) {
+ return new StoredField(sf.getName(), ((Date) this.toNativeType(value)).getTime());
+ }
+}
+
+class DatePointFieldSource extends LongFieldSource {
+
+ public DatePointFieldSource(String field) {
+ super(field);
+ }
+
+ @Override
+ public String description() {
+ return "date(" + field + ')';
+ }
+
+ @Override
+ protected MutableValueLong newMutableValueLong() {
+ return new MutableValueDate();
+ }
+
+ @Override
+ public Date longToObject(long val) {
+ return new Date(val);
+ }
+
+ @Override
+ public String longToString(long val) {
+ return longToObject(val).toInstant().toString();
+ }
+
+ @Override
+ public long externalToLong(String extVal) {
+ return DateMathParser.parseMath(null, extVal).getTime();
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index c650845..521324a 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -923,7 +923,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
outValues.add(NumericUtils.sortableLongToDouble(number));
break;
case DATE:
- newVal = new Date(number);
+ outValues.add(new Date(number));
break;
default:
throw new AssertionError("Unexpected PointType: " + type);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema-docValuesFaceting.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-docValuesFaceting.xml b/solr/core/src/test-files/solr/collection1/conf/schema-docValuesFaceting.xml
index 597f2c3..0917ff5 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-docValuesFaceting.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-docValuesFaceting.xml
@@ -27,6 +27,7 @@
<fieldType name="plong" class="solr.LongPointField"/>
<fieldType name="pdouble" class="solr.DoublePointField"/>
<fieldType name="pfloat" class="solr.FloatPointField"/>
+ <fieldType name="pdate" class="solr.DatePointField"/>
<field name="id" type="string" indexed="true" stored="true" docValues="false" multiValued="false" required="true"/>
<field name="id_dv" type="string" indexed="false" stored="false" docValues="true" multiValued="false"
@@ -60,8 +61,8 @@
<dynamicField name="*_ds_dv" type="double" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_ds_p" type="pdouble" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="false" docValues="false"/>
- <dynamicField name="*_dt_dv" type="date" indexed="true" stored="false" docValues="true"/>
- <dynamicField name="*_dts_dv" type="date" indexed="true" stored="false" docValues="true" multiValued="true"/>
+ <dynamicField name="*_dt_dv" type="${solr.tests.dateClass:pdate}" indexed="true" stored="false" docValues="true"/>
+ <dynamicField name="*_dts_dv" type="${solr.tests.dateClass:pdate}" indexed="true" stored="false" docValues="true" multiValued="true"/>
<defaultSearchField>id</defaultSearchField>
<uniqueKey>id</uniqueKey>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-point.xml b/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
index 3561013..ed169a1 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
@@ -24,6 +24,7 @@
<fieldType name="plong" class="solr.LongPointField"/>
<fieldType name="pdouble" class="solr.DoublePointField"/>
<fieldType name="pfloat" class="solr.FloatPointField"/>
+ <fieldType name="pdate" class="solr.DatePointField"/>
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
@@ -73,21 +74,31 @@
<dynamicField name="*_p_f_mv_dv" type="pfloat" indexed="true" stored="true" docValues="true" multiValued="true"/>
<dynamicField name="*_p_f_ni_dv" type="pfloat" indexed="false" stored="true" docValues="true"/>
<dynamicField name="*_p_f_ni_mv_dv" type="pfloat" indexed="false" stored="true" docValues="true" multiValued="true"/>
+
+ <dynamicField name="*_p_dt" type="pdate" indexed="true" stored="true"/>
+ <dynamicField name="*_p_dt_dv" type="pdate" indexed="true" stored="true" docValues="true"/>
+ <dynamicField name="*_p_dt_mv" type="pdate" indexed="true" stored="true" multiValued="true"/>
+ <dynamicField name="*_p_dt_mv_dv" type="pdate" indexed="true" stored="true" docValues="true" multiValued="true"/>
+ <dynamicField name="*_p_dt_ni_dv" type="pdate" indexed="false" stored="true" docValues="true"/>
+ <dynamicField name="*_p_dt_ni_mv_dv" type="pdate" indexed="false" stored="true" docValues="true" multiValued="true"/>
<!-- return DV fields as -->
<dynamicField name="*_p_i_dv_ns" type="pint" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_l_dv_ns" type="plong" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_d_dv_ns" type="pdouble" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_f_dv_ns" type="pfloat" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true"/>
+ <dynamicField name="*_p_dt_dv_ns" type="pdate" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_i_ni_ns_dv" type="pint" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_l_ni_ns_dv" type="plong" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_d_ni_ns_dv" type="pdouble" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_f_ni_ns_dv" type="pfloat" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
+ <dynamicField name="*_p_dt_ni_ns_dv" type="pdate" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
<dynamicField name="*_p_i_dv_ns_mv" type="pint" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true" multiValued="true"/>
<dynamicField name="*_p_d_dv_ns_mv" type="pdouble" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true" multiValued="true"/>
<dynamicField name="*_p_l_dv_ns_mv" type="plong" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true" multiValued="true"/>
<dynamicField name="*_p_f_dv_ns_mv" type="pfloat" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true" multiValued="true"/>
+ <dynamicField name="*_p_dt_dv_ns_mv" type="pdate" indexed="true" stored="false" docValues="true" useDocValuesAsStored="true" multiValued="true"/>
</fields>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml b/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml
index f68841c..8497318 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml
@@ -73,9 +73,9 @@ NOTE: Tests expect every field in this schema to be sortable.
<field name="date" type="date"/>
<field name="date_last" type="date_last"/>
<field name="date_first" type="date_first"/>
- <field name="date_dv" type="date_dv"/>
- <field name="date_dv_last" type="date_dv_last"/>
- <field name="date_dv_first" type="date_dv_first"/>
+ <field name="date_dv" type="${solr.tests.dateClass:pdate}_dv"/>
+ <field name="date_dv_last" type="${solr.tests.dateClass:pdate}_dv_last"/>
+ <field name="date_dv_first" type="${solr.tests.dateClass:pdate}_dv_first"/>
<field name="uuid" type="uuid"/>
<field name="uuid_last" type="uuid_last"/>
@@ -276,6 +276,11 @@ NOTE: Tests expect every field in this schema to be sortable.
sortMissingLast="true"/>
<fieldType name="date_dv_first" class="solr.TrieDateField" stored="true" indexed="false" docValues="true"
sortMissingFirst="true"/>
+ <fieldType name="pdate_dv" class="solr.DatePointField" stored="true" indexed="false" docValues="true"/>
+ <fieldType name="pdate_dv_last" class="solr.DatePointField" stored="true" indexed="false" docValues="true"
+ sortMissingLast="true"/>
+ <fieldType name="pdate_dv_first" class="solr.DatePointField" stored="true" indexed="false" docValues="true"
+ sortMissingFirst="true"/>
<fieldType name="uuid" class="solr.UUIDField" stored="true" indexed="true"/>
<fieldType name="uuid_last" class="solr.UUIDField" stored="true" indexed="true" sortMissingLast="true"/>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml
index c53be9b..8c549a3 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml
@@ -49,6 +49,7 @@
<fieldType name="plong" class="solr.LongPointField" docValues="true"/>
<fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
<fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+ <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
<!-- Field type demonstrating an Analyzer failure -->
<fieldType name="failtype1" class="solr.TextField">
@@ -569,13 +570,13 @@
<field name="textgap" type="textgap" indexed="true" stored="true"/>
- <field name="timestamp" type="date" indexed="true" stored="true" docValues="true" default="NOW" multiValued="false"/>
+ <field name="timestamp" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" docValues="true" default="NOW" multiValued="false"/>
<field name="multiDefault" type="string" indexed="true" stored="true" default="muLti-Default" multiValued="true"/>
<field name="intDefault" type="${solr.tests.intClass:pint}" indexed="true" stored="true" default="42" multiValued="false"/>
<field name="intDvoDefault" type="${solr.tests.intClass:pint}" indexed="false" stored="false" multiValued="false"
useDocValuesAsStored="true" docValues="true" default="42" />
<field name="intRemove" type="${solr.tests.intClass:pint}" indexed="true" stored="true" multiValued="true"/>
- <field name="dateRemove" type="date" indexed="true" stored="true" multiValued="true"/>
+ <field name="dateRemove" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" multiValued="true"/>
<field name="floatRemove" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true" multiValued="true"/>
<field name="nopositionstext" type="nopositions" indexed="true" stored="true"/>
@@ -621,8 +622,8 @@
<dynamicField name="*_d" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true"/>
<dynamicField name="*_d1" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_d1_ndv" type="${solr.tests.doubleClass:pdouble}" indexed="true" docValues="false" stored="true" multiValued="false"/>
- <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
- <dynamicField name="*_dt1" type="date" indexed="true" stored="true" multiValued="false"/>
+ <dynamicField name="*_dt" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true"/>
+ <dynamicField name="*_dt1" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" multiValued="false"/>
<!-- some trie-coded dynamic fields for faster range queries -->
@@ -683,7 +684,7 @@
<dynamicField name="*_l_dv" type="${solr.tests.longClass:plong}" indexed="true" stored="true" docValues="true"/>
<dynamicField name="*_f_dv" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true" docValues="true"/>
<dynamicField name="*_d_dv" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" docValues="true"/>
- <dynamicField name="*_dt_dv" type="date" indexed="true" stored="true" docValues="true"/>
+ <dynamicField name="*_dt_dv" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" docValues="true"/>
<dynamicField name="*_f1_dv" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true" docValues="true" multiValued="false"/>
<!-- Non-stored, DocValues=true -->
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema11.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema11.xml b/solr/core/src/test-files/solr/collection1/conf/schema11.xml
index 24129ae..7591c96 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema11.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema11.xml
@@ -83,6 +83,7 @@
<fieldType name="plong" class="solr.LongPointField" docValues="true"/>
<fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
<fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+ <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
is a more restricted form of the canonical representation of dateTime
@@ -403,7 +404,7 @@ valued. -->
<dynamicField name="*_t" type="text" indexed="true" stored="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
- <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
+ <dynamicField name="*_dt" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true"/>
<dynamicField name="*_ws" type="text_ws" indexed="true" stored="true"/>
<!-- for testing tfidf functions, see TestFunctionQuery.testTFIDFFunctions -->
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema12.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema12.xml b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
index 2d0615c..8577440 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
@@ -42,12 +42,13 @@
<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
-
+
<!-- Point Fields -->
<fieldType name="pint" class="solr.IntPointField" docValues="true"/>
<fieldType name="plong" class="solr.LongPointField" docValues="true"/>
<fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
<fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+ <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
<!-- Field type demonstrating an Analyzer failure -->
<fieldType name="failtype1" class="solr.TextField">
@@ -361,7 +362,7 @@
</analyzer>
</fieldType>
- <!-- a text field with the stop filter only on the query analyzer
+ <!-- a text field with the stop filter only on the query analyzer
-->
<fieldType name="text_sw" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
@@ -568,8 +569,8 @@
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_f" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true"/>
<dynamicField name="*_d" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true"/>
- <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
-
+ <dynamicField name="*_dt" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true"/>
+
<dynamicField name="*_pi" type="pint" indexed="true" stored="true" docValues="false" multiValued="false"/>
<!-- some trie-coded dynamic fields for faster range queries -->
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml
index e4747d8..dfeac32 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml
@@ -221,10 +221,10 @@
<dynamicField name="*_ds" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_dd" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_dds" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="false" multiValued="true" docValues="true"/>
- <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
- <dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
- <dynamicField name="*_dtd" type="date" indexed="true" stored="false" docValues="true"/>
- <dynamicField name="*_dtds" type="date" indexed="true" stored="false" multiValued="true" docValues="true"/>
+ <dynamicField name="*_dt" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true"/>
+ <dynamicField name="*_dts" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" multiValued="true"/>
+ <dynamicField name="*_dtd" type="${solr.tests.dateClass:pdate}" indexed="true" stored="false" docValues="true"/>
+ <dynamicField name="*_dtds" type="${solr.tests.dateClass:pdate}" indexed="true" stored="false" multiValued="true" docValues="true"/>
<!-- docvalues and stored (S suffix) -->
<dynamicField name="*_idS" type="${solr.tests.intClass:pint}" indexed="true" stored="true" docValues="true"/>
@@ -237,8 +237,8 @@
<dynamicField name="*_fdsS" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true" multiValued="true" docValues="true"/>
<dynamicField name="*_ddS" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" docValues="true"/>
<dynamicField name="*_ddsS" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" multiValued="true" docValues="true"/>
- <dynamicField name="*_dtdS" type="date" indexed="true" stored="true" docValues="true"/>
- <dynamicField name="*_dtdsS" type="date" indexed="true" stored="true" multiValued="true" docValues="true"/>
+ <dynamicField name="*_dtdS" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" docValues="true"/>
+ <dynamicField name="*_dtdsS" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" multiValued="true" docValues="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
@@ -400,6 +400,7 @@
<fieldType name="plong" class="solr.LongPointField" docValues="true"/>
<fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
<fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+ <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/solrconfig-parsing-update-processor-chains.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-parsing-update-processor-chains.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-parsing-update-processor-chains.xml
index 7078da7..f83df6c 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-parsing-update-processor-chains.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-parsing-update-processor-chains.xml
@@ -47,6 +47,13 @@
</processor>
</updateRequestProcessorChain>
+ <updateRequestProcessorChain name="parse-date-explicit-typeclass-point-selector-no-run-processor">
+ <processor class="solr.ParseDateFieldUpdateProcessorFactory">
+ <str name="typeClass">solr.DatePointField</str>
+ <str name="format">yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
+ </processor>
+ </updateRequestProcessorChain>
+
<updateRequestProcessorChain name="parse-date-explicit-typeclass-selector-no-run-processor">
<processor class="solr.ParseDateFieldUpdateProcessorFactory">
<str name="typeClass">solr.TrieDateField</str>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test-files/solr/collection1/conf/solrconfig-update-processor-chains.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-update-processor-chains.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-update-processor-chains.xml
index a38bc04..426f3c0 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-update-processor-chains.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-update-processor-chains.xml
@@ -94,6 +94,7 @@
<!-- each set of exclusions is checked independently -->
<lst name="exclude">
<str name="typeClass">solr.TrieDateField</str>
+ <str name="typeClass">solr.DatePointField</str>
</lst>
<lst name="exclude">
<str name="fieldRegex">.*HOSS.*</str>
@@ -144,6 +145,7 @@
<updateRequestProcessorChain name="trim-classes">
<processor class="solr.TrimFieldUpdateProcessorFactory">
<str name="typeClass">solr.TrieDateField</str>
+ <str name="typeClass">solr.DatePointField</str>
<str name="typeClass">solr.StrField</str>
</processor>
</updateRequestProcessorChain>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java
index 92b4943..d253ef2 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java
@@ -117,7 +117,7 @@ public class LukeRequestHandlerTest extends AbstractSolrTestCase {
// only valid for fields that are indexed & stored
for (String f : Arrays.asList("solr_t","solr_s","solr_ti",
"solr_td","solr_dt","solr_b")) {
-
+ if (h.getCore().getLatestSchema().getField(f).getType().isPointField()) continue;
final String xp = getFieldXPathPrefix(f);
assertQ("Not as many index flags as expected ("+numFlags+") for " + f,
req("qt","/admin/luke", "fl", f),
@@ -166,7 +166,7 @@ public class LukeRequestHandlerTest extends AbstractSolrTestCase {
response = h.query(req);
for (String f : Arrays.asList("solr_t", "solr_s", "solr_ti",
"solr_td", "solr_dt", "solr_b")) {
-
+ if (h.getCore().getLatestSchema().getField(f).getType().isPointField()) continue;
assertNull(TestHarness.validateXPath(response,
getFieldXPathPrefix(f) + "[@name='index']"));
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
index ac451bf..d42d6dd 100644
--- a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java
@@ -29,7 +29,7 @@ import org.apache.solr.core.SolrResourceLoader;
public class DateFieldTest extends SolrTestCaseJ4 {
private final String testInstanceDir = TEST_HOME() + File.separator + "collection1";
private final String testConfHome = testInstanceDir + File.separator + "conf"+ File.separator;
- private TrieDateField f = null;
+ private FieldType f = null;
@Override
public void setUp() throws Exception {
@@ -40,7 +40,7 @@ public class DateFieldTest extends SolrTestCaseJ4 {
SolrConfig config = new SolrConfig
(new SolrResourceLoader(Paths.get(testInstanceDir)), testConfHome + "solrconfig.xml", null);
IndexSchema schema = IndexSchemaFactory.buildIndexSchema(testConfHome + "schema.xml", config);
- f = new TrieDateField();
+ f = random().nextBoolean()? new TrieDateField() : new DatePointField();
f.init(schema, Collections.<String,String>emptyMap());
}
@@ -51,13 +51,13 @@ public class DateFieldTest extends SolrTestCaseJ4 {
SchemaField sf = new SchemaField( "test", f, props, null );
// String
IndexableField out = f.createField(sf, "1995-12-31T23:59:59Z" );
- assertEquals(820454399000L, f.toObject( out ).getTime() );
+ assertEquals(820454399000L, ((Date) f.toObject( out )).getTime() );
// Date obj
out = f.createField(sf, new Date(820454399000L) );
- assertEquals(820454399000L, f.toObject( out ).getTime() );
+ assertEquals(820454399000L, ((Date) f.toObject( out )).getTime() );
// Date math
out = f.createField(sf, "1995-12-31T23:59:59.99Z+5MINUTES");
- assertEquals(820454699990L, f.toObject( out ).getTime() );
+ assertEquals(820454699990L, ((Date) f.toObject( out )).getTime() );
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3131ec2d/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
index b3d0b97..3c1f0b3 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
@@ -25,6 +25,7 @@ import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
+import org.apache.solr.util.DateMathParser;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -587,6 +588,126 @@ public class TestPointFields extends SolrTestCaseJ4 {
doTestSetQueries("number_p_l_mv", getRandomStringArrayWithLongs(10, false), true);
doTestSetQueries("number_p_l_ni_dv", getRandomStringArrayWithLongs(10, false), false);
}
+
+ // Date
+
+ @Test
+ public void testDatePointFieldExactQuery() throws Exception {
+ doTestDatePointFieldExactQuery("number_p_dt", "1995-12-31T23:59:59Z");
+ doTestDatePointFieldExactQuery("number_p_dt_mv", "2015-12-31T23:59:59Z-1DAY");
+ doTestDatePointFieldExactQuery("number_p_dt_ni_dv", "2000-12-31T23:59:59Z+3DAYS");
+ doTestDatePointFieldExactQuery("number_p_dt_ni_ns_dv", "1995-12-31T23:59:59Z-1MONTH");
+ doTestDatePointFieldExactQuery("number_p_dt_ni_mv_dv", "1995-12-31T23:59:59Z+2MONTHS");
+ }
+
+ @Test
+ public void testDatePointFieldReturn() throws Exception {
+ testPointFieldReturn("number_p_dt", "date",
+ new String[]{"1995-12-31T23:59:59Z", "1994-02-28T23:59:59Z",
+ "2015-12-31T23:59:59Z", "2000-10-31T23:59:59Z", "1999-12-31T12:59:59Z"});
+ clearIndex();
+ assertU(commit());
+ testPointFieldReturn("number_p_dt_dv_ns", "date",
+ new String[]{"1995-12-31T23:59:59Z", "1994-02-28T23:59:59Z",
+ "2015-12-31T23:59:59Z", "2000-10-31T23:59:59Z", "1999-12-31T12:59:59Z"});
+ }
+
+ @Test
+ public void testDatePointFieldRangeQuery() throws Exception {
+ doTestDatePointFieldRangeQuery("number_p_dt");
+ doTestDatePointFieldRangeQuery("number_p_dt_ni_ns_dv");
+ }
+
+ @Test
+ public void testDatePointFieldSort() throws Exception {
+ doTestPointFieldSort("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
+ }
+
+ @Test
+ public void testDatePointFieldFacetField() throws Exception {
+ testPointFieldFacetField("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
+ clearIndex();
+ assertU(commit());
+ testPointFieldFacetField("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
+ }
+
+ @Test
+ public void testDatePointFieldRangeFacet() throws Exception {
+ doTestDatePointFieldRangeFacet("number_p_dt_dv", "number_p_dt");
+ }
+
+ @Test
+ public void testDatePointFunctionQuery() throws Exception {
+ doTestDatePointFunctionQuery("number_p_dt_dv", "number_p_dt", "date");
+ }
+
+ @Test
+ public void testDatePointStats() throws Exception {
+ testDatePointStats("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
+ testDatePointStats("number_p_dt_mv", "number_p_dt_mv_dv", getSequentialStringArrayWithDates(10));
+ }
+
+ @Test
+ public void testDatePointFieldMultiValuedExactQuery() throws Exception {
+ testPointFieldMultiValuedExactQuery("number_p_dt_mv", getSequentialStringArrayWithDates(20));
+ testPointFieldMultiValuedExactQuery("number_p_dt_ni_mv_dv", getSequentialStringArrayWithDates(20));
+ }
+
+ @Test
+ public void testDatePointFieldMultiValuedReturn() throws Exception {
+ testPointFieldMultiValuedReturn("number_p_dt_mv", "date", getSequentialStringArrayWithDates(20));
+ testPointFieldMultiValuedReturn("number_p_dt_ni_mv_dv", "date", getSequentialStringArrayWithDates(20));
+ testPointFieldMultiValuedReturn("number_p_dt_dv_ns_mv", "date", getSequentialStringArrayWithDates(20));
+ }
+
+ @Test
+ public void testDatePointFieldMultiValuedRangeQuery() throws Exception {
+ testPointFieldMultiValuedRangeQuery("number_p_dt_mv", "date", getSequentialStringArrayWithDates(20));
+ testPointFieldMultiValuedRangeQuery("number_p_dt_ni_mv_dv", "date", getSequentialStringArrayWithDates(20));
+ }
+
+ @Test
+ public void testDatePointFieldMultiValuedFacetField() throws Exception {
+ testPointFieldMultiValuedFacetField("number_p_dt_mv", "number_p_dt_mv_dv", getSequentialStringArrayWithDates(20));
+ testPointFieldMultiValuedFacetField("number_p_dt_mv", "number_p_dt_mv_dv", getRandomStringArrayWithDates(20, false));
+ }
+
+ @Test
+ public void testDatePointFieldMultiValuedRangeFacet() throws Exception {
+ doTestDatePointFieldMultiValuedRangeFacet("number_p_dt_mv_dv", "number_p_dt_mv");
+ }
+
+ @Test
+ public void testDatePointMultiValuedFunctionQuery() throws Exception {
+ testPointMultiValuedFunctionQuery("number_p_dt_mv", "number_p_dt_mv_dv", "date", getSequentialStringArrayWithDates(20));
+ }
+
+ @Test
+ public void testDatePointFieldsAtomicUpdates() throws Exception {
+ if (!Boolean.getBoolean("enable.update.log")) {
+ return;
+ }
+ testDatePointFieldsAtomicUpdates("number_p_dt", "date");
+ testDatePointFieldsAtomicUpdates("number_p_dt_dv", "date");
+ testDatePointFieldsAtomicUpdates("number_p_dt_dv_ns", "date");
+ }
+
+ @Test
+ public void testMultiValuedDatePointFieldsAtomicUpdates() throws Exception {
+ if (!Boolean.getBoolean("enable.update.log")) {
+ return;
+ }
+ testMultiValuedDatePointFieldsAtomicUpdates("number_p_dt_mv", "date");
+ testMultiValuedDatePointFieldsAtomicUpdates("number_p_dt_ni_mv_dv", "date");
+ testMultiValuedDatePointFieldsAtomicUpdates("number_p_dt_dv_ns_mv", "date");
+ }
+
+ @Test
+ public void testDatePointSetQuery() throws Exception {
+ doTestSetQueries("number_p_dt", getRandomStringArrayWithDates(10, false), false);
+ doTestSetQueries("number_p_dt_mv", getRandomStringArrayWithDates(10, false), true);
+ doTestSetQueries("number_p_dt_ni_dv", getRandomStringArrayWithDates(10, false), false);
+ }
@Test
public void testIndexOrDocValuesQuery() throws Exception {
@@ -664,6 +785,15 @@ public class TestPointFields extends SolrTestCaseJ4 {
}
return arr;
}
+
+ private String[] getSequentialStringArrayWithDates(int length) {
+ assert length < 60;
+ String[] arr = new String[length];
+ for (int i = 0; i < length; i++) {
+ arr[i] = String.format(Locale.ROOT, "1995-12-11T19:59:%02dZ", i);
+ }
+ return arr;
+ }
private String[] getSequentialStringArrayWithDoubles(int length) {
String[] arr = new String[length];
@@ -718,6 +848,27 @@ public class TestPointFields extends SolrTestCaseJ4 {
}
return stringArr;
}
+
+ private String[] getRandomStringArrayWithDates(int length, boolean sorted) {
+ assert length < 60;
+ Set<Integer> set;
+ if (sorted) {
+ set = new TreeSet<>();
+ } else {
+ set = new HashSet<>();
+ }
+ while (set.size() < length) {
+ int number = random().nextInt(60);
+ set.add(number);
+ }
+ String[] stringArr = new String[length];
+ int i = 0;
+ for (int val:set) {
+ stringArr[i] = String.format(Locale.ROOT, "1995-12-11T19:59:%02dZ", val);
+ i++;
+ }
+ return stringArr;
+ }
private void doTestIntPointFieldExactQuery(String field, boolean testLong) throws Exception {
for (int i=0; i < 10; i++) {
@@ -1037,12 +1188,21 @@ public class TestPointFields extends SolrTestCaseJ4 {
}
assertU(commit());
for (int i = 0; i < 20; i++) {
- assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-")),
- "//*[@numFound='1']");
+ if (h.getCore().getLatestSchema().getField(fieldName).getType() instanceof DatePointField) {
+ assertQ(req("q", fieldName + ":\"" + numbers[i] + "\""),
+ "//*[@numFound='1']");
+ } else {
+ assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-")),
+ "//*[@numFound='1']");
+ }
}
for (int i = 0; i < 20; i++) {
- assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-") + " OR " + fieldName + ":" + numbers[(i+1)%10].replace("-", "\\-")), "//*[@numFound='2']");
+ if (h.getCore().getLatestSchema().getField(fieldName).getType() instanceof DatePointField) {
+ assertQ(req("q", fieldName + ":\"" + numbers[i] + "\"" + " OR " + fieldName + ":\"" + numbers[(i+1)%10]+"\""), "//*[@numFound='2']");
+ } else {
+ assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-") + " OR " + fieldName + ":" + numbers[(i+1)%10].replace("-", "\\-")), "//*[@numFound='2']");
+ }
}
}
@@ -1089,10 +1249,10 @@ public class TestPointFields extends SolrTestCaseJ4 {
assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued());
assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField);
for (int i=0; i < 10; i++) {
- assertU(adoc("id", String.valueOf(i), fieldName, String.valueOf(i), fieldName, String.valueOf(i+10)));
+ assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i+10]));
}
assertU(commit());
- assertQ(req("q", fieldName + ":[0 TO 3]", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s]", fieldName, numbers[0], numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='4']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[2][.='" + numbers[10] + "']",
@@ -1103,36 +1263,36 @@ public class TestPointFields extends SolrTestCaseJ4 {
"//result/doc[4]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[3] + "']",
"//result/doc[4]/arr[@name='" + fieldName + "']/" + type + "[2][.='" + numbers[13] + "']");
- assertQ(req("q", fieldName + ":{0 TO 3]", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO %s]", fieldName, numbers[0], numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='3']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']",
"//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']",
"//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[3] + "']");
- assertQ(req("q", fieldName + ":[0 TO 3}", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s}", fieldName, numbers[0], numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='3']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']",
"//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']",
"//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']");
- assertQ(req("q", fieldName + ":{0 TO 3}", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO %s}", fieldName, numbers[0], numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='2']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']",
"//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']");
-
- assertQ(req("q", fieldName + ":{0 TO *}", "fl", "id, " + fieldName),
+
+ assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[0]), "fl", "id, " + fieldName),
"//*[@numFound='10']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']");
- assertQ(req("q", fieldName + ":{10 TO *}", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[10]), "fl", "id, " + fieldName),
"//*[@numFound='9']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']");
- assertQ(req("q", fieldName + ":{* TO 3}", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:{* TO %s}", fieldName, numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='3']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']");
- assertQ(req("q", fieldName + ":[* TO 3}", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:[* TO %s}", fieldName, numbers[3]), "fl", "id, " + fieldName),
"//*[@numFound='3']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']");
@@ -1141,14 +1301,14 @@ public class TestPointFields extends SolrTestCaseJ4 {
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']",
"//result/doc[10]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[9] + "']");
- assertQ(req("q", fieldName + ":[0 TO 1] OR " + fieldName + ":[8 TO 9]", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s] OR %s:[%s TO %s]", fieldName, numbers[0], numbers[1], fieldName, numbers[8], numbers[9]), "fl", "id, " + fieldName),
"//*[@numFound='4']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']",
"//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']",
"//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[8] + "']",
"//result/doc[4]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[9] + "']");
- assertQ(req("q", fieldName + ":[0 TO 0] AND " + fieldName + ":[10 TO 10]", "fl", "id, " + fieldName),
+ assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s] OR %s:[%s TO %s]", fieldName, numbers[0], numbers[0], fieldName, numbers[10], numbers[10]), "fl", "id, " + fieldName),
"//*[@numFound='1']",
"//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']");
}
@@ -1238,12 +1398,22 @@ public class TestPointFields extends SolrTestCaseJ4 {
larger = numbers[1];
}
} catch (NumberFormatException e) {
- if (Double.valueOf(numbers[1]) < Double.valueOf(numbers[2])) {
- smaller = numbers[1];
- larger = numbers[2];
- } else {
- smaller = numbers[2];
- larger = numbers[1];
+ try {
+ if (Double.valueOf(numbers[1]) < Double.valueOf(numbers[2])) {
+ smaller = numbers[1];
+ larger = numbers[2];
+ } else {
+ smaller = numbers[2];
+ larger = numbers[1];
+ }
+ } catch (NumberFormatException e2) {
+ if (DateMathParser.parseMath(null, numbers[1]).getTime() < DateMathParser.parseMath(null, numbers[2]).getTime()) {
+ smaller = numbers[1];
+ larger = numbers[2];
+ } else {
+ smaller = numbers[2];
+ larger = numbers[1];
+ }
}
}
@@ -1818,4 +1988,353 @@ public class TestPointFields extends SolrTestCaseJ4 {
"//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='18'][.='2']",
"//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='-10'][.='0']");
}
+
+
+ /**
+  * Verifies exact-match queries against a date-based PointField.
+  * Indexes 10 docs whose {@code field} value is {@code baseDate} plus 1..10 minutes
+  * (as Solr date-math strings), then checks that quoting each exact value in a query
+  * matches exactly one doc, and that an OR of two distinct values matches exactly two.
+  * Clears the index when done so callers can reuse the core.
+  *
+  * @param field    name of the date point field under test
+  * @param baseDate date-math anchor, presumably an ISO instant like "1995-12-31T10:59:59Z" -- TODO confirm against callers
+  */
+ private void doTestDatePointFieldExactQuery(String field, String baseDate) throws Exception {
+ for (int i=0; i < 10; i++) {
+ assertU(adoc("id", String.valueOf(i), field, String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1)));
+ }
+ assertU(commit());
+ // Each indexed value, queried verbatim (quoted), must match exactly its own doc.
+ for (int i = 0; i < 10; i++) {
+ String date = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1);
+ assertQ(req("q", field + ":\""+date+"\"", "fl", "id, " + field),
+ "//*[@numFound='1']");
+ }
+
+ for (int i = 0; i < 10; i++) {
+ String date1 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1);
+ // (i+1)%10 + 1 wraps to a different offset in [1,10], guaranteeing date2 != date1.
+ String date2 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, ((i+1)%10 + 1));
+ assertQ(req("q", field + ":\"" + date1 + "\""
+ + " OR " + field + ":\"" + date2 + "\""), "//*[@numFound='2']");
+ }
+
+ clearIndex();
+ assertU(commit());
+ }
+
+ /**
+  * Verifies range-query semantics on a date-based PointField.
+  * Indexes 10 docs at baseDate+0..9 HOURS, then exercises all four bracket
+  * combinations (inclusive [..], exclusive {..}, and mixed), open-ended ranges
+  * with '*', and boolean combinations (OR / AND / AND NOT) of two ranges,
+  * asserting both hit counts and result ordering. Clears the index when done.
+  *
+  * @param fieldName name of the date point field under test
+  */
+ private void doTestDatePointFieldRangeQuery(String fieldName) throws Exception {
+ String baseDate = "1995-12-31T10:59:59Z";
+ for (int i = 0; i < 10; i++) {
+ assertU(adoc("id", String.valueOf(i), fieldName, String.format(Locale.ROOT, "%s+%dHOURS", baseDate, i)));
+ }
+ assertU(commit());
+ // [0H TO 3H]: both endpoints inclusive -> 4 hits.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='4']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']",
+ "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']",
+ "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']",
+ "//result/doc[4]/date[@name='" + fieldName + "'][.='1995-12-31T13:59:59Z']");
+
+ // {0H TO 3H]: lower bound exclusive -> first doc drops out.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='3']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']",
+ "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']",
+ "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T13:59:59Z']");
+
+ // [0H TO 3H}: upper bound exclusive -> last doc drops out.
+ assertQ(req("q", fieldName + ":"+ String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS}",baseDate,baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='3']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']",
+ "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']",
+ "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']");
+
+ // {0H TO 3H}: both bounds exclusive -> only the two interior docs.
+ assertQ(req("q", fieldName + ":"+ String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS}",baseDate,baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='2']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']",
+ "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']");
+
+ // Open upper bound: everything strictly after baseDate.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{%s+0HOURS TO *}",baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='9']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']");
+
+ // Open lower bound, exclusive upper.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{* TO %s+3HOURS}",baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='3']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']");
+
+ // '[* ' vs '{* ' should behave identically for an unbounded endpoint.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[* TO %s+3HOURS}",baseDate), "fl", "id, " + fieldName),
+ "//*[@numFound='3']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']");
+
+ // Fully open range matches all 10 docs.
+ assertQ(req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName),
+ "//*[@numFound='10']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']",
+ "//result/doc[10]/date[@name='" + fieldName + "'][.='1995-12-31T19:59:59Z']");
+
+ // Disjunction of two disjoint ranges -> union of their hits.
+ assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate)
+ +" OR " + fieldName + ":" + String.format(Locale.ROOT, "[%s+8HOURS TO %s+9HOURS]",baseDate,baseDate) , "fl", "id, " + fieldName),
+ "//*[@numFound='4']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']",
+ "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']",
+ "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T18:59:59Z']",
+ "//result/doc[4]/date[@name='" + fieldName + "'][.='1995-12-31T19:59:59Z']");
+
+ // Conjunction of overlapping ranges -> only the shared doc (+1H).
+ assertQ(req("q", fieldName + ":"+String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate)
+ +" AND " + fieldName + ":"+String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]",baseDate,baseDate) , "fl", "id, " + fieldName),
+ "//*[@numFound='1']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']");
+
+ // AND NOT excludes the overlap, leaving only the +0H doc.
+ assertQ(req("q", fieldName + ":"+String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate)
+ +" AND NOT " + fieldName + ":"+String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]",baseDate,baseDate) , "fl", "id, " + fieldName),
+ "//*[@numFound='1']",
+ "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']");
+
+ clearIndex();
+ assertU(commit());
+ }
+
+ /**
+  * Verifies range faceting over single-valued date PointFields.
+  * Indexes 10 docs at baseDate+0..9 DAYS in both fields, then facets with a
+  * 2-day gap over a [-10DAYS, +10DAYS] window and asserts identical bucket
+  * counts for: the docValues field with the default and "dv" methods, and the
+  * non-docValues field with the "filter" method (and with "dv" requested,
+  * which is expected to silently fall back to filter -- see comment below).
+  *
+  * @param docValuesField    date point field with docValues enabled
+  * @param nonDocValuesField date point field without docValues
+  */
+ private void doTestDatePointFieldRangeFacet(String docValuesField, String nonDocValuesField) throws Exception {
+ String baseDate = "1995-01-10T10:59:59Z";
+ for (int i = 0; i < 10; i++) {
+ String date = String.format(Locale.ROOT, "%s+%dDAYS", baseDate, i);
+ assertU(adoc("id", String.valueOf(i), docValuesField, date, nonDocValuesField, date));
+ }
+ assertU(commit());
+ // Sanity-check the schema matches what this helper is meant to exercise.
+ assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
+ assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+10DAYS", "facet.range.gap", "+2DAYS"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-08T10:59:59Z'][.='0']");
+
+ // Same expectations when explicitly requesting the docValues-based method.
+ assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+10DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "dv"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-08T10:59:59Z'][.='0']");
+
+ assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField);
+ // Range Faceting with method = filter should work
+ assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+10DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "filter"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-08T10:59:59Z'][.='0']");
+
+ // this should actually use filter method instead of dv
+ assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+10DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "dv"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-08T10:59:59Z'][.='0']");
+ }
+
+ /**
+  * Verifies range faceting over multiValued date PointFields.
+  * Each of 10 docs gets two values per field (baseDate+i DAYS and
+  * baseDate+(i+10) DAYS), so a 2-day-gap facet over a 30-day window should see
+  * 2 values per bucket; a single 100-day bucket starting at baseDate counts
+  * every value >= baseDate (10, i.e. one per doc within [start,end)).
+  * Exercises the docValues field with default and "dv" methods, and the
+  * non-docValues field with "filter" and with "dv" (expected to fall back).
+  *
+  * @param docValuesField    multiValued date point field with docValues
+  * @param nonDocValuesField multiValued date point field without docValues
+  */
+ private void doTestDatePointFieldMultiValuedRangeFacet(String docValuesField, String nonDocValuesField) throws Exception {
+ String baseDate = "1995-01-10T10:59:59Z";
+ for (int i = 0; i < 10; i++) {
+ String date1 = String.format(Locale.ROOT, "%s+%dDAYS", baseDate, i);
+ String date2 = String.format(Locale.ROOT, "%s+%dDAYS", baseDate, i+10);
+ assertU(adoc("id", String.valueOf(i), docValuesField, date1, docValuesField, date2,
+ nonDocValuesField, date1, nonDocValuesField, date2));
+ }
+ assertU(commit());
+ // Sanity-check schema assumptions before asserting facet behavior.
+ assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues())
+ assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
+ assertQ(req("q", "*:*", "fl", "id", "facet", "true", "facet.range", docValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+20DAYS", "facet.range.gap", "+2DAYS"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-20T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-22T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-24T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-26T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-28T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1994-12-31T10:59:59Z'][.='0']");
+
+ // Same buckets expected when the docValues-based method is requested explicitly.
+ assertQ(req("q", "*:*", "fl", "id", "facet", "true", "facet.range", docValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+20DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "dv"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-20T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-22T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-24T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-26T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-28T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1994-12-31T10:59:59Z'][.='0']");
+
+ // One oversized bucket: counts each doc once within [start, end), not each value.
+ assertQ(req("q", "*:*", "fl", "id", "facet", "true", "facet.range", docValuesField, "facet.range.start", "1995-01-10T10:59:59Z",
+ "facet.range.end", "1995-01-10T10:59:59Z+20DAYS", "facet.range.gap", "+100DAYS"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='10']");
+
+ assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField);
+ // Range Faceting with method = filter should work
+ assertQ(req("q", "*:*", "fl", "id", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+20DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "filter"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-20T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-22T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-24T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-26T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-28T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1994-12-31T10:59:59Z'][.='0']");
+
+ // this should actually use filter method instead of dv
+ assertQ(req("q", "*:*", "fl", "id", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", "1995-01-10T10:59:59Z-10DAYS",
+ "facet.range.end", "1995-01-10T10:59:59Z+20DAYS", "facet.range.gap", "+2DAYS", "facet.range.method", "dv"),
+ "//*[@numFound='10']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-10T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-12T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-14T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-16T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-18T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-20T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-22T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-24T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-26T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1995-01-28T10:59:59Z'][.='2']",
+ "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1994-12-31T10:59:59Z'][.='0']");
+ }
+
+ /**
+  * Verifies function queries over a date PointField with docValues:
+  * sorting by product(-1, field), the ms() function (millisecond difference
+  * from baseDate, returned as a float per the assertions below), and field().
+  * Also asserts that sorting by a function of the non-docValues field fails
+  * with a BAD_REQUEST, since function sorting needs docValues on point fields.
+  *
+  * @param dvFieldName    date point field with docValues
+  * @param nonDvFieldName date point field without docValues
+  * @param type           XML element name used for values of this field type in responses (e.g. "date")
+  */
+ private void doTestDatePointFunctionQuery(String dvFieldName, String nonDvFieldName, String type) throws Exception {
+ String baseDate = "1995-01-10T10:59:10Z";
+ for (int i = 0; i < 10; i++) {
+ String date = String.format(Locale.ROOT, "%s+%dSECONDS", baseDate, i+1);
+ assertU(adoc("id", String.valueOf(i), dvFieldName, date, nonDvFieldName, date));
+ }
+ assertU(commit());
+ assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).getType() instanceof PointField);
+ // product(-1, field) asc == field desc on the negated value -> ascending dates here.
+ assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "sort", "product(-1," + dvFieldName + ") asc"),
+ "//*[@numFound='10']",
+ "//result/doc[1]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:11Z']",
+ "//result/doc[2]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:12Z']",
+ "//result/doc[3]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:13Z']",
+ "//result/doc[10]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:20Z']");
+
+ // ms(field, baseDate): doc i was indexed at baseDate+(i+1)s -> (i+1)*1000 ms.
+ assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", ms(" + dvFieldName + ","+baseDate+")"),
+ "//*[@numFound='10']",
+ "//result/doc[1]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='1000.0']",
+ "//result/doc[2]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='2000.0']",
+ "//result/doc[3]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='3000.0']",
+ "//result/doc[10]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='10000.0']");
+
+ // field() should echo the stored/docValues date unchanged.
+ assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", field(" + dvFieldName + ")"),
+ "//*[@numFound='10']",
+ "//result/doc[1]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:11Z']",
+ "//result/doc[2]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:12Z']",
+ "//result/doc[3]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:13Z']",
+ "//result/doc[10]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:20Z']");
+
+ assertFalse(h.getCore().getLatestSchema().getField(nonDvFieldName).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(nonDvFieldName).getType() instanceof PointField);
+
+ // Without docValues a function sort cannot be evaluated on a point field.
+ assertQEx("Expecting Exception",
+ "sort param could not be parsed as a query",
+ req("q", "*:*", "fl", "id, " + nonDvFieldName, "sort", "product(-1," + nonDvFieldName + ") asc"),
+ SolrException.ErrorCode.BAD_REQUEST);
+ }
+
+ /**
+  * Verifies the stats component on date PointFields.
+  * Indexes one doc per entry of {@code dates} (value stored in both fields)
+  * plus one extra doc with neither field, then asserts min/max/count/missing
+  * on the docValues field and that requesting stats on the non-docValues
+  * field fails with BAD_REQUEST.
+  *
+  * @param field   date point field without docValues (expected to be rejected)
+  * @param dvField date point field with docValues
+  * @param dates   date strings in ascending order -- min/max assertions assume dates[0] is the
+  *                smallest and dates[length-1] the largest; TODO confirm callers pass sorted input
+  */
+ private void testDatePointStats(String field, String dvField, String[] dates) {
+ for (int i = 0; i < dates.length; i++) {
+ assertU(adoc("id", String.valueOf(i), dvField, dates[i], field, dates[i]));
+ }
+ // One doc with no value for either field; it must show up in the 'missing' count.
+ assertU(adoc("id", String.valueOf(dates.length)));
+ assertU(commit());
+ assertTrue(h.getCore().getLatestSchema().getField(dvField).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(dvField).getType() instanceof PointField);
+ assertQ(req("q", "*:*", "fl", "id, " + dvField, "stats", "true", "stats.field", dvField),
+ "//*[@numFound='11']",
+ "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/date[@name='min'][.='" + dates[0] + "']",
+ "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/date[@name='max'][.='" + dates[dates.length-1] + "']",
+ "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='count'][.='" + dates.length + "']",
+ "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='missing'][.='1']");
+
+ assertFalse(h.getCore().getLatestSchema().getField(field).hasDocValues());
+ assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
+ // Stats need docValues on point fields; without them the request must be rejected.
+ assertQEx("Expecting Exception",
+ "Can't calculate stats on a PointField without docValues",
+ req("q", "*:*", "fl", "id, " + field, "stats", "true", "stats.field", field),
+ SolrException.ErrorCode.BAD_REQUEST);
+ }
+
+ /**
+  * Verifies atomic "set" updates on a single-valued date PointField:
+  * indexes one doc, then replaces its value using date math ("+2DAYS")
+  * and asserts the stored value reflects the shifted date.
+  *
+  * @param field date point field under test
+  * @param type  XML element name for this field's values in responses (e.g. "date")
+  */
+ private void testDatePointFieldsAtomicUpdates(String field, String type) throws Exception {
+ String date = "1995-01-10T10:59:10Z";
+ assertU(adoc(sdoc("id", "1", field, date)));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/" + type + "[@name='" + field + "'][.='"+date+"']");
+
+ // Atomic 'set' with date math: baseline + 2 days.
+ assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", date+"+2DAYS"))));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/" + type + "[@name='" + field + "'][.='1995-01-12T10:59:10Z']");
+ }
+
+ /**
+  * Verifies the full set of atomic-update operations on a multiValued date
+  * PointField: "add" appends a value, "remove" deletes one, "set" replaces
+  * the whole list, and "removeregex" with ".*" empties the field. After each
+  * step both the remaining values and the value count are asserted.
+  *
+  * @param field multiValued date point field under test
+  * @param type  XML element name for this field's values in responses (e.g. "date")
+  */
+ private void testMultiValuedDatePointFieldsAtomicUpdates(String field, String type) throws Exception {
+ String date1 = "1995-01-10T10:59:10Z";
+ String date2 = "1995-01-11T10:59:10Z";
+ String date3 = "1995-01-12T10:59:10Z";
+ assertU(adoc(sdoc("id", "1", field, date1)));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date1+"']",
+ "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=1");
+
+ // 'add' appends a second value.
+ assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("add", date2))));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date1+"']",
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date2+"']",
+ "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=2");
+
+ // 'remove' deletes only the matching value, leaving date2.
+ assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("remove", date1))));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date2+"']",
+ "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=1");
+
+ // 'set' with a list replaces the entire multiValued contents.
+ assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", ImmutableList.of(date1, date2, date3)))));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date1+"']",
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date2+"']",
+ "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='"+date3+"']",
+ "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=3");
+
+ // 'removeregex' matching everything leaves the field empty but the doc present.
+ assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("removeregex", ".*"))));
+ assertU(commit());
+
+ assertQ(req("q", "id:1"),
+ "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=0");
+
+ }
+
+
}
[32/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-9858: Collect
aggregated metrics from nodes and shard leaders in overseer.
Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
new file mode 100644
index 0000000..e9b8c3d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
@@ -0,0 +1,392 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.ScheduledReporter;
+import com.codahale.metrics.Timer;
+import org.apache.http.client.HttpClient;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.io.SolrClientCache;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.util.stats.MetricUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implementation of {@link ScheduledReporter} that reports metrics from selected registries and sends
+ * them periodically as update requests to a selected Solr collection and to a configured handler.
+ */
+public class SolrReporter extends ScheduledReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String REGISTRY_ID = "_registry_";
+ public static final String REPORTER_ID = "_reporter_";
+ public static final String GROUP_ID = "_group_";
+ public static final String LABEL_ID = "_label_";
+
+
+ /**
+ * Specification of what registries and what metrics to send.
+ */
+ public static final class Report {
+ public String groupPattern;
+ public String labelPattern;
+ public String registryPattern;
+ public Set<String> metricFilters = new HashSet<>();
+
+ /**
+ * Create a report specification
+ * @param groupPattern logical group for these metrics. This is used in {@link MetricsCollectorHandler}
+ * to select the target registry for metrics to aggregate. Must not be null or empty.
+ * It may contain back-references to capture groups from {@code registryPattern}
+ * @param labelPattern name of this group of metrics. This is used in {@link MetricsCollectorHandler}
+ * to prefix metric names. May be null or empty. It may contain back-references
+ * to capture groups from {@code registryPattern}.
+ * @param registryPattern pattern for selecting matching registries, see {@link SolrMetricManager#registryNames(String...)}
+ * @param metricFilters patterns for selecting matching metrics, see {@link org.apache.solr.metrics.SolrMetricManager.RegexFilter}
+ */
+ public Report(String groupPattern, String labelPattern, String registryPattern, Collection<String> metricFilters) {
+ this.groupPattern = groupPattern;
+ this.labelPattern = labelPattern;
+ this.registryPattern = registryPattern;
+ if (metricFilters != null) {
+ this.metricFilters.addAll(metricFilters);
+ }
+ }
+
+ public static Report fromMap(Map<?, ?> map) {
+ String groupPattern = (String)map.get("group");
+ String labelPattern = (String)map.get("label");
+ String registryPattern = (String)map.get("registry");
+ Object oFilters = map.get("filter");
+ Collection<String> metricFilters = Collections.emptyList();
+ if (oFilters != null) {
+ if (oFilters instanceof String) {
+ metricFilters = Collections.singletonList((String)oFilters);
+ } else if (oFilters instanceof Collection) {
+ metricFilters = (Collection<String>)oFilters;
+ } else {
+ log.warn("Invalid report filters, ignoring: " + oFilters);
+ }
+ }
+ if (groupPattern == null || registryPattern == null) {
+ log.warn("Invalid report configuration, group and registry required!: " + map);
+ return null;
+ }
+ return new Report(groupPattern, labelPattern, registryPattern, metricFilters);
+ }
+ }
+
+ public static class Builder {
+ private final SolrMetricManager metricManager;
+ private final List<Report> reports;
+ private String reporterId;
+ private TimeUnit rateUnit;
+ private TimeUnit durationUnit;
+ private String handler;
+ private boolean skipHistograms;
+ private boolean skipAggregateValues;
+ private boolean cloudClient;
+ private SolrParams params;
+
+ /**
+ * Create a builder for SolrReporter.
+ * @param metricManager metric manager that is the source of metrics
+ * @param reports report definitions
+ * @return builder
+ */
+ public static Builder forReports(SolrMetricManager metricManager, List<Report> reports) {
+ return new Builder(metricManager, reports);
+ }
+
+ private Builder(SolrMetricManager metricManager, List<Report> reports) {
+ this.metricManager = metricManager;
+ this.reports = reports;
+ this.rateUnit = TimeUnit.SECONDS;
+ this.durationUnit = TimeUnit.MILLISECONDS;
+ this.skipHistograms = false;
+ this.skipAggregateValues = false;
+ this.cloudClient = false;
+ this.params = null;
+ }
+
+ /**
+ * Additional {@link SolrParams} to add to every request.
+ * @param params additional params
+ * @return {@code this}
+ */
+ public Builder withSolrParams(SolrParams params) {
+ this.params = params;
+ return this;
+ }
+ /**
+ * If true then use {@link org.apache.solr.client.solrj.impl.CloudSolrClient} for communication.
+ * Default is false.
+ * @param cloudClient use CloudSolrClient when true, {@link org.apache.solr.client.solrj.impl.HttpSolrClient} otherwise.
+ * @return {@code this}
+ */
+ public Builder cloudClient(boolean cloudClient) {
+ this.cloudClient = cloudClient;
+ return this;
+ }
+
+ /**
+ * Histograms are difficult / impossible to aggregate, so it may not be
+ * worth to report them.
+ * @param skipHistograms when true then skip histograms from reports
+ * @return {@code this}
+ */
+ public Builder skipHistograms(boolean skipHistograms) {
+ this.skipHistograms = skipHistograms;
+ return this;
+ }
+
+ /**
+ * Individual values from {@link org.apache.solr.metrics.AggregateMetric} may not be worth to report.
+ * @param skipAggregateValues when tru then skip reporting individual values from the metric
+ * @return {@code this}
+ */
+ public Builder skipAggregateValues(boolean skipAggregateValues) {
+ this.skipAggregateValues = skipAggregateValues;
+ return this;
+ }
+
+ /**
+ * Handler name to use at the remote end.
+ *
+ * @param handler handler name, eg. "/admin/metricsCollector"
+ * @return {@code this}
+ */
+ public Builder withHandler(String handler) {
+ this.handler = handler;
+ return this;
+ }
+
+ /**
+ * Use this id to identify metrics from this instance.
+ *
+ * @param reporterId reporter id
+ * @return {@code this}
+ */
+ public Builder withReporterId(String reporterId) {
+ this.reporterId = reporterId;
+ return this;
+ }
+
+ /**
+ * Convert rates to the given time unit.
+ *
+ * @param rateUnit a unit of time
+ * @return {@code this}
+ */
+ public Builder convertRatesTo(TimeUnit rateUnit) {
+ this.rateUnit = rateUnit;
+ return this;
+ }
+
+ /**
+ * Convert durations to the given time unit.
+ *
+ * @param durationUnit a unit of time
+ * @return {@code this}
+ */
+ public Builder convertDurationsTo(TimeUnit durationUnit) {
+ this.durationUnit = durationUnit;
+ return this;
+ }
+
+ /**
+ * Build it.
+ * @param client an instance of {@link HttpClient} to be used for making calls.
+ * @param urlProvider function that returns the base URL of Solr instance to target. May return
+ * null to indicate that reporting should be skipped. Note: this
+ * function will be called every time just before report is sent.
+ * @return configured instance of reporter
+ */
+ public SolrReporter build(HttpClient client, Supplier<String> urlProvider) {
+ return new SolrReporter(client, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit,
+ params, skipHistograms, skipAggregateValues, cloudClient);
+ }
+
+ }
+
+ private String reporterId;
+ private String handler;
+ private Supplier<String> urlProvider;
+ private SolrClientCache clientCache;
+ private List<CompiledReport> compiledReports;
+ private SolrMetricManager metricManager;
+ private boolean skipHistograms;
+ private boolean skipAggregateValues;
+ private boolean cloudClient;
+ private ModifiableSolrParams params;
+ private Map<String, Object> metadata;
+
+ private static final class CompiledReport {
+ String group;
+ String label;
+ Pattern registryPattern;
+ MetricFilter filter;
+
+ CompiledReport(Report report) throws PatternSyntaxException {
+ this.group = report.groupPattern;
+ this.label = report.labelPattern;
+ this.registryPattern = Pattern.compile(report.registryPattern);
+ this.filter = new SolrMetricManager.RegexFilter(report.metricFilters);
+ }
+
+ @Override
+ public String toString() {
+ return "CompiledReport{" +
+ "group='" + group + '\'' +
+ ", label='" + label + '\'' +
+ ", registryPattern=" + registryPattern +
+ ", filter=" + filter +
+ '}';
+ }
+ }
+
+ public SolrReporter(HttpClient httpClient, Supplier<String> urlProvider, SolrMetricManager metricManager,
+ List<Report> metrics, String handler,
+ String reporterId, TimeUnit rateUnit, TimeUnit durationUnit,
+ SolrParams params, boolean skipHistograms, boolean skipAggregateValues, boolean cloudClient) {
+ super(null, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit);
+ this.metricManager = metricManager;
+ this.urlProvider = urlProvider;
+ this.reporterId = reporterId;
+ if (handler == null) {
+ handler = MetricsCollectorHandler.HANDLER_PATH;
+ }
+ this.handler = handler;
+ this.clientCache = new SolrClientCache(httpClient);
+ this.compiledReports = new ArrayList<>();
+ metrics.forEach(report -> {
+ MetricFilter filter = new SolrMetricManager.RegexFilter(report.metricFilters);
+ try {
+ CompiledReport cs = new CompiledReport(report);
+ compiledReports.add(cs);
+ } catch (PatternSyntaxException e) {
+ log.warn("Skipping report with invalid registryPattern: " + report.registryPattern, e);
+ }
+ });
+ this.skipHistograms = skipHistograms;
+ this.skipAggregateValues = skipAggregateValues;
+ this.cloudClient = cloudClient;
+ this.params = new ModifiableSolrParams();
+ this.params.set(REPORTER_ID, reporterId);
+ // allow overrides to take precedence
+ if (params != null) {
+ this.params.add(params);
+ }
+ metadata = new HashMap<>();
+ metadata.put(REPORTER_ID, reporterId);
+ }
+
+ @Override
+ public void close() {
+ clientCache.close();
+ super.close();
+ }
+
+ @Override
+ public void report() {
+ String url = urlProvider.get();
+ // if null then suppress reporting
+ if (url == null) {
+ return;
+ }
+
+ SolrClient solr;
+ if (cloudClient) {
+ solr = clientCache.getCloudSolrClient(url);
+ } else {
+ solr = clientCache.getHttpSolrClient(url);
+ }
+ UpdateRequest req = new UpdateRequest(handler);
+ req.setParams(params);
+ compiledReports.forEach(report -> {
+ Set<String> registryNames = metricManager.registryNames(report.registryPattern);
+ registryNames.forEach(registryName -> {
+ String label = report.label;
+ if (label != null && label.indexOf('$') != -1) {
+ // label with back-references
+ Matcher m = report.registryPattern.matcher(registryName);
+ label = m.replaceFirst(label);
+ }
+ final String effectiveLabel = label;
+ String group = report.group;
+ if (group.indexOf('$') != -1) {
+ // group with back-references
+ Matcher m = report.registryPattern.matcher(registryName);
+ group = m.replaceFirst(group);
+ }
+ final String effectiveGroup = group;
+ MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
+ skipHistograms, skipAggregateValues, metadata, doc -> {
+ doc.setField(REGISTRY_ID, registryName);
+ doc.setField(GROUP_ID, effectiveGroup);
+ if (effectiveLabel != null) {
+ doc.setField(LABEL_ID, effectiveLabel);
+ }
+ req.add(doc);
+ });
+ });
+ });
+
+ // if no docs added then don't send a report
+ if (req.getDocuments() == null || req.getDocuments().isEmpty()) {
+ return;
+ }
+ try {
+ //log.info("%%% sending to " + url + ": " + req.getParams());
+ solr.request(req);
+ } catch (Exception e) {
+ log.debug("Error sending metric report", e.toString());
+ }
+
+ }
+
+ @Override
+ public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
+ // no-op - we do all the work in report()
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
new file mode 100644
index 0000000..2b20274
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class reports selected metrics from replicas to shard leader.
+ * <p>The following configuration properties are supported:</p>
+ * <ul>
+ * <li>handler - (optional str) handler path where reports are sent. Default is
+ * {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
+ * <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
+ * to 0 disables the reporter.</li>
+ * <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
+ * </ul>
+ * NOTE: this reporter uses predefined "replica" group, and it's always created even if explicit configuration
+ * is missing. Default configuration uses filters defined in {@link #DEFAULT_FILTERS}.
+ * <p>Example configuration:</p>
+ * <pre>
+ * <reporter name="test" group="replica">
+ * <int name="period">11</int>
+ * <str name="filter">UPDATE\./update/.*requests</str>
+ * <str name="filter">QUERY\./select.*requests</str>
+ * </reporter>
+ * </pre>
+ */
+public class SolrShardReporter extends SolrMetricReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final List<String> DEFAULT_FILTERS = new ArrayList(){{
+ add("TLOG.*");
+ add("REPLICATION.*");
+ add("INDEX.flush.*");
+ add("INDEX.merge.major.*");
+ add("UPDATE\\./update/.*requests");
+ add("QUERY\\./select.*requests");
+ }};
+
+ private String handler = MetricsCollectorHandler.HANDLER_PATH;
+ private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
+ private List<String> filters = new ArrayList<>();
+
+ private SolrReporter reporter;
+
+ /**
+ * Create a reporter for metrics managed in a named registry.
+ *
+ * @param metricManager metric manager
+ * @param registryName registry to use, one of registries managed by
+ * {@link SolrMetricManager}
+ */
+ public SolrShardReporter(SolrMetricManager metricManager, String registryName) {
+ super(metricManager, registryName);
+ }
+
+ public void setHandler(String handler) {
+ this.handler = handler;
+ }
+
+ public void setPeriod(int period) {
+ this.period = period;
+ }
+
+ public void setFilter(List<String> filterConfig) {
+ if (filterConfig == null || filterConfig.isEmpty()) {
+ return;
+ }
+ filters = filterConfig;
+ }
+
+ // for unit tests
+ int getPeriod() {
+ return period;
+ }
+
+ @Override
+ protected void validate() throws IllegalStateException {
+ if (period < 1) {
+ log.info("Turning off shard reporter, period=" + period);
+ }
+ if (filters.isEmpty()) {
+ filters = DEFAULT_FILTERS;
+ }
+ // start in inform(...) only when core is available
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (reporter != null) {
+ reporter.close();
+ }
+ }
+
+ public void setCore(SolrCore core) {
+ if (reporter != null) {
+ reporter.close();
+ }
+ if (core.getCoreDescriptor().getCloudDescriptor() == null) {
+ // not a cloud core
+ log.warn("Not initializing shard reporter for non-cloud core " + core.getName());
+ return;
+ }
+ if (period < 1) { // don't start it
+ log.warn("Not starting shard reporter ");
+ return;
+ }
+ // our id is coreNodeName
+ String id = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
+ // target registry is the leaderRegistryName
+ String groupId = core.getCoreMetricManager().getLeaderRegistryName();
+ if (groupId == null) {
+ log.warn("No leaderRegistryName for core " + core + ", not starting the reporter...");
+ return;
+ }
+ SolrReporter.Report spec = new SolrReporter.Report(groupId, null, registryName, filters);
+ reporter = SolrReporter.Builder.forReports(metricManager, Collections.singletonList(spec))
+ .convertRatesTo(TimeUnit.SECONDS)
+ .convertDurationsTo(TimeUnit.MILLISECONDS)
+ .withHandler(handler)
+ .withReporterId(id)
+ .cloudClient(false) // we want to send reports specifically to a selected leader instance
+ .skipAggregateValues(true) // we don't want to transport details of aggregates
+ .skipHistograms(true) // we don't want to transport histograms
+ .build(core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient(), new LeaderUrlSupplier(core));
+
+ reporter.start(period, TimeUnit.SECONDS);
+ }
+
+ private static class LeaderUrlSupplier implements Supplier<String> {
+ private SolrCore core;
+
+ LeaderUrlSupplier(SolrCore core) {
+ this.core = core;
+ }
+
+ @Override
+ public String get() {
+ CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
+ if (cd == null) {
+ return null;
+ }
+ ClusterState state = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
+ DocCollection collection = state.getCollection(core.getCoreDescriptor().getCollectionName());
+ Replica replica = collection.getLeader(core.getCoreDescriptor().getCloudDescriptor().getShardId());
+ if (replica == null) {
+ log.warn("No leader for " + collection.getName() + "/" + core.getCoreDescriptor().getCloudDescriptor().getShardId());
+ return null;
+ }
+ String baseUrl = replica.getStr("base_url");
+ if (baseUrl == null) {
+ log.warn("No base_url for replica " + replica);
+ }
+ return baseUrl;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
new file mode 100644
index 0000000..740bcce
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains {@link org.apache.solr.metrics.SolrMetricReporter} implementations
+ * specific to SolrCloud reporting.
+ */
+package org.apache.solr.metrics.reporters.solr;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index ac07413..874e39c 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -161,11 +161,13 @@ public class PeerSync implements SolrMetricProducer {
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
}
+ // Extra metric-name segment that groups all PeerSync metrics under "peerSync".
+ public static final String METRIC_SCOPE = "peerSync";
+
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
- syncTime = manager.timer(registry, "time", scope);
- syncErrors = manager.counter(registry, "errors", scope);
- syncSkipped = manager.counter(registry, "skipped", scope);
+ // register timer/counters under the additional "peerSync" scope segment
+ syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
+ syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
+ syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
}
/** optional list of updates we had before possibly receiving new updates */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 80f035b..5a7c680 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -16,11 +16,15 @@
*/
package org.apache.solr.util.stats;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
@@ -32,13 +36,40 @@ import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
+import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.AggregateMetric;
/**
* Metrics specific utility functions.
*/
public class MetricUtils {
+ public static final String METRIC_NAME = "metric";
+ public static final String VALUES = "values";
+
+ static final String MS = "_ms";
+
+ static final String MIN = "min";
+ static final String MIN_MS = MIN + MS;
+ static final String MAX = "max";
+ static final String MAX_MS = MAX + MS;
+ static final String MEAN = "mean";
+ static final String MEAN_MS = MEAN + MS;
+ static final String MEDIAN = "median";
+ static final String MEDIAN_MS = MEDIAN + MS;
+ static final String STDDEV = "stddev";
+ static final String STDDEV_MS = STDDEV + MS;
+ static final String SUM = "sum";
+ static final String P75 = "p75";
+ static final String P75_MS = P75 + MS;
+ static final String P95 = "p95";
+ static final String P95_MS = P95 + MS;
+ static final String P99 = "p99";
+ static final String P99_MS = P99 + MS;
+ static final String P999 = "p999";
+ static final String P999_MS = P999 + MS;
+
/**
* Adds metrics from a Timer to a NamedList, using well-known back-compat names.
* @param lst The NamedList to add the metrics data to
@@ -77,41 +108,138 @@ public class MetricUtils {
* included in the output
* @param mustMatchFilter a {@link MetricFilter}.
* A metric <em>must</em> match this filter to be included in the output.
+ * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+ * @param metadata optional metadata. If not null and not empty then this map will be added under a
+ * {@code _metadata_} key.
* @return a {@link NamedList}
*/
- public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters, MetricFilter mustMatchFilter) {
- NamedList response = new NamedList();
+ public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata) {
+ NamedList result = new NamedList();
+ // delegate to toNamedMaps and wrap each per-metric map in a NamedList entry
+ toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
+ result.add(k, new NamedList(v));
+ });
+ if (metadata != null && !metadata.isEmpty()) {
+ result.add("_metadata_", new NamedList(metadata));
+ }
+ return result;
+ }
+
+ /**
+ * Returns a representation of the given metric registry as a list of {@link SolrInputDocument}-s.
+ * Only those metrics
+ * are converted to documents which match at least one of the given MetricFilter instances.
+ *
+ * @param registry the {@link MetricRegistry} to be converted
+ * @param shouldMatchFilters a list of {@link MetricFilter} instances.
+ * A metric must match <em>any one</em> of the filters from this list to be
+ * included in the output
+ * @param mustMatchFilter a {@link MetricFilter}.
+ * A metric <em>must</em> match this filter to be included in the output.
+ * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+ * @param metadata optional metadata. If not null and not empty then this map will be added under a
+ * {@code _metadata_} key.
+ * @return a list of {@link SolrInputDocument}-s
+ */
+ public static List<SolrInputDocument> toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata) {
+ List<SolrInputDocument> result = new LinkedList<>();
+ toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter, skipHistograms,
+ skipAggregateValues, metadata, doc -> {
+ result.add(doc);
+ });
+ return result;
+ }
+
+ public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
+ boolean addMetadata = metadata != null && !metadata.isEmpty();
+ toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
+ SolrInputDocument doc = new SolrInputDocument();
+ doc.setField(METRIC_NAME, k);
+ toSolrInputDocument(null, doc, v);
+ if (addMetadata) {
+ toSolrInputDocument(null, doc, metadata);
+ }
+ consumer.accept(doc);
+ });
+ }
+
+ public static void toSolrInputDocument(String prefix, SolrInputDocument doc, Map<String, Object> map) {
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ if (entry.getValue() instanceof Map) { // flatten recursively
+ toSolrInputDocument(entry.getKey(), doc, (Map<String, Object>)entry.getValue());
+ } else {
+ String key = prefix != null ? prefix + "." + entry.getKey() : entry.getKey();
+ doc.addField(key, entry.getValue());
+ }
+ }
+ }
+
+ // Walks all metrics in the registry that pass the filters and hands each one,
+ // converted to a plain Map, to the consumer. Histograms are dropped when
+ // skipHistograms is true; AggregateMetric detail values when skipAggregateValues is true.
+ public static void toNamedMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms, boolean skipAggregateValues,
+ BiConsumer<String, Map<String, Object>> consumer) {
Map<String, Metric> metrics = registry.getMetrics();
SortedSet<String> names = registry.getNames();
names.stream()
.filter(s -> shouldMatchFilters.stream().anyMatch(metricFilter -> metricFilter.matches(s, metrics.get(s))))
.filter(s -> mustMatchFilter.matches(s, metrics.get(s)))
.forEach(n -> {
- Metric metric = metrics.get(n);
- if (metric instanceof Counter) {
- Counter counter = (Counter) metric;
- response.add(n, counterToNamedList(counter));
- } else if (metric instanceof Gauge) {
- Gauge gauge = (Gauge) metric;
- response.add(n, gaugeToNamedList(gauge));
- } else if (metric instanceof Meter) {
- Meter meter = (Meter) metric;
- response.add(n, meterToNamedList(meter));
- } else if (metric instanceof Timer) {
- Timer timer = (Timer) metric;
- response.add(n, timerToNamedList(timer));
- } else if (metric instanceof Histogram) {
- Histogram histogram = (Histogram) metric;
- response.add(n, histogramToNamedList(histogram));
- }
- });
+ // dispatch on the concrete metric type; unknown types are silently ignored
+ Metric metric = metrics.get(n);
+ if (metric instanceof Counter) {
+ Counter counter = (Counter) metric;
+ consumer.accept(n, counterToMap(counter));
+ } else if (metric instanceof Gauge) {
+ Gauge gauge = (Gauge) metric;
+ consumer.accept(n, gaugeToMap(gauge));
+ } else if (metric instanceof Meter) {
+ Meter meter = (Meter) metric;
+ consumer.accept(n, meterToMap(meter));
+ } else if (metric instanceof Timer) {
+ Timer timer = (Timer) metric;
+ consumer.accept(n, timerToMap(timer, skipHistograms));
+ } else if (metric instanceof Histogram) {
+ if (!skipHistograms) {
+ Histogram histogram = (Histogram) metric;
+ consumer.accept(n, histogramToMap(histogram));
+ }
+ } else if (metric instanceof AggregateMetric) {
+ consumer.accept(n, aggregateMetricToMap((AggregateMetric)metric, skipAggregateValues));
+ }
+ });
+ }
+
+ // Converts an AggregateMetric into a map of summary statistics (count, max, min,
+ // mean, stddev, sum). Unless skipAggregateValues is set, the individual
+ // contributing values are also included under "values" as {value, updateCount} sub-maps.
+ static Map<String, Object> aggregateMetricToMap(AggregateMetric metric, boolean skipAggregateValues) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", metric.size());
+ response.put(MAX, metric.getMax());
+ response.put(MIN, metric.getMin());
+ response.put(MEAN, metric.getMean());
+ response.put(STDDEV, metric.getStdDev());
+ response.put(SUM, metric.getSum());
+ if (!(metric.isEmpty() || skipAggregateValues)) {
+ Map<String, Object> values = new LinkedHashMap<>();
+ response.put(VALUES, values);
+ metric.getValues().forEach((k, v) -> {
+ Map<String, Object> map = new LinkedHashMap<>();
+ map.put("value", v.value);
+ map.put("updateCount", v.updateCount.get());
+ values.put(k, map);
+ });
+ }
return response;
}
- static NamedList histogramToNamedList(Histogram histogram) {
- NamedList response = new NamedList();
+ static Map<String, Object> histogramToMap(Histogram histogram) {
+ Map<String, Object> response = new LinkedHashMap<>();
Snapshot snapshot = histogram.getSnapshot();
- response.add("count", histogram.getCount());
+ response.put("count", histogram.getCount());
// non-time based values
addSnapshot(response, snapshot, false);
return response;
@@ -126,71 +254,52 @@ public class MetricUtils {
}
}
- static final String MS = "_ms";
-
- static final String MIN = "min";
- static final String MIN_MS = MIN + MS;
- static final String MAX = "max";
- static final String MAX_MS = MAX + MS;
- static final String MEAN = "mean";
- static final String MEAN_MS = MEAN + MS;
- static final String MEDIAN = "median";
- static final String MEDIAN_MS = MEDIAN + MS;
- static final String STDDEV = "stddev";
- static final String STDDEV_MS = STDDEV + MS;
- static final String P75 = "p75";
- static final String P75_MS = P75 + MS;
- static final String P95 = "p95";
- static final String P95_MS = P95 + MS;
- static final String P99 = "p99";
- static final String P99_MS = P99 + MS;
- static final String P999 = "p999";
- static final String P999_MS = P999 + MS;
-
// some snapshots represent time in ns, other snapshots represent raw values (eg. chunk size)
- static void addSnapshot(NamedList response, Snapshot snapshot, boolean ms) {
- response.add((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
- response.add((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
- response.add((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
- response.add((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
- response.add((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
- response.add((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
- response.add((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
- response.add((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
- response.add((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
- }
-
- static NamedList timerToNamedList(Timer timer) {
- NamedList response = new NamedList();
- response.add("count", timer.getCount());
- response.add("meanRate", timer.getMeanRate());
- response.add("1minRate", timer.getOneMinuteRate());
- response.add("5minRate", timer.getFiveMinuteRate());
- response.add("15minRate", timer.getFifteenMinuteRate());
- // time-based values in nanoseconds
- addSnapshot(response, timer.getSnapshot(), true);
+ static void addSnapshot(Map<String, Object> response, Snapshot snapshot, boolean ms) {
+ response.put((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
+ response.put((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
+ response.put((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
+ response.put((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
+ response.put((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
+ response.put((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
+ response.put((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
+ response.put((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
+ response.put((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
+ }
+
+ static Map<String,Object> timerToMap(Timer timer, boolean skipHistograms) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", timer.getCount());
+ response.put("meanRate", timer.getMeanRate());
+ response.put("1minRate", timer.getOneMinuteRate());
+ response.put("5minRate", timer.getFiveMinuteRate());
+ response.put("15minRate", timer.getFifteenMinuteRate());
+ if (!skipHistograms) {
+ // time-based values in nanoseconds
+ addSnapshot(response, timer.getSnapshot(), true);
+ }
return response;
}
- static NamedList meterToNamedList(Meter meter) {
- NamedList response = new NamedList();
- response.add("count", meter.getCount());
- response.add("meanRate", meter.getMeanRate());
- response.add("1minRate", meter.getOneMinuteRate());
- response.add("5minRate", meter.getFiveMinuteRate());
- response.add("15minRate", meter.getFifteenMinuteRate());
+ static Map<String, Object> meterToMap(Meter meter) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", meter.getCount());
+ response.put("meanRate", meter.getMeanRate());
+ response.put("1minRate", meter.getOneMinuteRate());
+ response.put("5minRate", meter.getFiveMinuteRate());
+ response.put("15minRate", meter.getFifteenMinuteRate());
return response;
}
- static NamedList gaugeToNamedList(Gauge gauge) {
- NamedList response = new NamedList();
- response.add("value", gauge.getValue());
+ static Map<String, Object> gaugeToMap(Gauge gauge) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("value", gauge.getValue());
return response;
}
- static NamedList counterToNamedList(Counter counter) {
- NamedList response = new NamedList();
- response.add("count", counter.getCount());
+ static Map<String, Object> counterToMap(Counter counter) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", counter.getCount());
return response;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test-files/solr/solr-solrreporter.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-solrreporter.xml b/solr/core/src/test-files/solr/solr-solrreporter.xml
new file mode 100644
index 0000000..db03e42
--- /dev/null
+++ b/solr/core/src/test-files/solr/solr-solrreporter.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<solr>
+ <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+ <str name="urlScheme">${urlScheme:}</str>
+ <int name="socketTimeout">${socketTimeout:90000}</int>
+ <int name="connTimeout">${connTimeout:15000}</int>
+ </shardHandlerFactory>
+
+ <solrcloud>
+ <str name="host">127.0.0.1</str>
+ <int name="hostPort">${hostPort:8983}</int>
+ <str name="hostContext">${hostContext:solr}</str>
+ <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+ <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+ <int name="leaderVoteWait">${leaderVoteWait:10000}</int>
+ <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+ <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+ <int name="autoReplicaFailoverWaitAfterExpiration">${autoReplicaFailoverWaitAfterExpiration:10000}</int>
+ <int name="autoReplicaFailoverWorkLoopDelay">${autoReplicaFailoverWorkLoopDelay:10000}</int>
+ <int name="autoReplicaFailoverBadNodeExpiration">${autoReplicaFailoverBadNodeExpiration:60000}</int>
+ </solrcloud>
+
+ <metrics>
+ <reporter name="test" group="shard">
+ <int name="period">5</int>
+ <str name="filter">UPDATE\./update/.*requests</str>
+ <str name="filter">QUERY\./select.*requests</str>
+ </reporter>
+ <reporter name="test" group="cluster">
+ <str name="handler">/admin/metrics/collector</str>
+ <int name="period">5</int>
+ <lst name="report">
+ <str name="group">cluster</str>
+ <str name="label">jvm</str>
+ <str name="registry">solr\.jvm</str>
+ <str name="filter">memory\.total\..*</str>
+ <str name="filter">memory\.heap\..*</str>
+ <str name="filter">os\.SystemLoadAverage</str>
+ <str name="filter">threads\.count</str>
+ </lst>
+ <lst name="report">
+ <str name="group">cluster</str>
+ <str name="label">leader.$1</str>
+ <str name="registry">solr\.collection\.(.*)\.leader</str>
+ <str name="filter">UPDATE\./update/.*</str>
+ </lst>
+ </reporter>
+ </metrics>
+</solr>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
index 164eeab..1af09f4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
@@ -119,9 +119,9 @@ public class TestCloudRecovery extends SolrCloudTestCase {
.filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
for (String registry : registryNames) {
Map<String, Metric> metrics = manager.registry(registry).getMetrics();
- Timer timer = (Timer)metrics.get("REPLICATION.time");
- Counter counter = (Counter)metrics.get("REPLICATION.errors");
- Counter skipped = (Counter)metrics.get("REPLICATION.skipped");
+ Timer timer = (Timer)metrics.get("REPLICATION.peerSync.time");
+ Counter counter = (Counter)metrics.get("REPLICATION.peerSync.errors");
+ Counter skipped = (Counter)metrics.get("REPLICATION.peerSync.skipped");
replicationCount += timer.getCount();
errorsCount += counter.getCount();
skippedCount += skipped.getCount();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java b/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
index 2cad6e8..aa107bc 100644
--- a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
+++ b/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
@@ -85,7 +85,7 @@ public class TestJmxMonitoredMap extends LuceneTestCase {
log.info("Using port: " + port);
String url = "service:jmx:rmi:///jndi/rmi://127.0.0.1:"+port+"/solrjmx";
JmxConfiguration config = new JmxConfiguration(true, null, url, null);
- monitoredMap = new JmxMonitoredMap<>("", "", config);
+ monitoredMap = new JmxMonitoredMap<>("", "", "", config);
JMXServiceURL u = new JMXServiceURL(url);
connector = JMXConnectorFactory.connect(u);
mbeanServer = connector.getMBeanServerConnection();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
index 1df6021..6e8e1e5 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
@@ -103,6 +103,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
String className = MockMetricReporter.class.getName();
String reporterName = TestUtil.randomUnicodeString(random);
+ String taggedName = reporterName + "@" + coreMetricManager.getTag();
Map<String, Object> attrs = new HashMap<>();
attrs.put(FieldType.CLASS_NAME, className);
@@ -116,15 +117,16 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
PluginInfo pluginInfo = shouldDefinePlugin ? new PluginInfo(TestUtil.randomUnicodeString(random), attrs) : null;
try {
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
assertNotNull(pluginInfo);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
- assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
- assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof MockMetricReporter);
+ assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(taggedName));
+ assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof MockMetricReporter);
} catch (IllegalArgumentException e) {
assertTrue(pluginInfo == null || attrs.get("configurable") == null);
- assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(reporterName));
+ assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(taggedName));
}
}
@@ -152,20 +154,11 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
}
@Test
- public void testRegistryName() throws Exception {
- String collectionName = "my_collection_";
- String cloudCoreName = "my_collection__shard1_0_replica0";
- String simpleCoreName = "collection_1_replica0";
- String simpleRegistryName = "solr.core." + simpleCoreName;
- String cloudRegistryName = "solr.core." + cloudCoreName;
- String nestedRegistryName = "solr.core.my_collection_.shard1_0.replica0";
- // pass through
- assertEquals(cloudRegistryName, coreMetricManager.createRegistryName(null, cloudCoreName));
- assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(null, simpleCoreName));
- // unknown naming scheme -> pass through
- assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(collectionName, simpleCoreName));
- // cloud collection
- assertEquals(nestedRegistryName, coreMetricManager.createRegistryName(collectionName, cloudCoreName));
-
+ public void testNonCloudRegistryName() throws Exception {
+ String registryName = h.getCore().getCoreMetricManager().getRegistryName();
+ String leaderRegistryName = h.getCore().getCoreMetricManager().getLeaderRegistryName();
+ assertNotNull(registryName);
+ assertEquals("solr.core.collection1", registryName);
+ assertNull(leaderRegistryName);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
index ee2acd3..1c29c5e 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
@@ -205,32 +205,32 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
createPluginInfo("node_foo", "node", null),
createPluginInfo("core_foo", "core", null)
};
-
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.node);
+ String tag = "xyz";
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.node);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
assertEquals(4, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
- assertTrue(reporters.containsKey("node_foo"));
- assertTrue(reporters.containsKey("multiregistry_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+ assertTrue(reporters.containsKey("node_foo@" + tag));
+ assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.core, "collection1");
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.core, "collection1");
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, "collection1"));
assertEquals(5, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
- assertTrue(reporters.containsKey("specific_foo"));
- assertTrue(reporters.containsKey("core_foo"));
- assertTrue(reporters.containsKey("multiregistry_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+ assertTrue(reporters.containsKey("specific_foo@" + tag));
+ assertTrue(reporters.containsKey("core_foo@" + tag));
+ assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.jvm);
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.jvm);
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm));
assertEquals(2, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
metricManager.removeRegistry("solr.jvm");
reporters = metricManager.getReporters(
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
index 27c038b..dfb5a0f 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
@@ -19,7 +19,6 @@ package org.apache.solr.metrics;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.Map;
import java.util.Random;
@@ -55,6 +54,11 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
private CoreContainer cc;
private SolrMetricManager metricManager;
+ private String tag;
+
+ private void assertTagged(Map<String, SolrMetricReporter> reporters, String name) {
+ assertTrue("Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag));
+ }
@Before
public void beforeTest() throws Exception {
@@ -68,10 +72,13 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
h.coreName = DEFAULT_TEST_CORENAME;
metricManager = cc.getMetricManager();
+ tag = h.getCore().getCoreMetricManager().getTag();
// initially there are more reporters, because two of them are added via a matching collection name
Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.core." + DEFAULT_TEST_CORENAME);
assertEquals(INITIAL_REPORTERS.length, reporters.size());
- assertTrue(reporters.keySet().containsAll(Arrays.asList(INITIAL_REPORTERS)));
+ for (String r : INITIAL_REPORTERS) {
+ assertTagged(reporters, r);
+ }
// test rename operation
cc.rename(DEFAULT_TEST_CORENAME, CORE_NAME);
h.coreName = CORE_NAME;
@@ -101,7 +108,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
deleteCore();
for (String reporterName : RENAMED_REPORTERS) {
- SolrMetricReporter reporter = reporters.get(reporterName);
+ SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
MockMetricReporter mockReporter = (MockMetricReporter) reporter;
assertTrue("Reporter " + reporterName + " was not closed: " + mockReporter, mockReporter.didClose);
}
@@ -130,7 +137,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
// SPECIFIC and MULTIREGISTRY were skipped because they were
// specific to collection1
for (String reporterName : RENAMED_REPORTERS) {
- SolrMetricReporter reporter = reporters.get(reporterName);
+ SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
assertNotNull("Reporter " + reporterName + " was not found.", reporter);
assertTrue(reporter instanceof MockMetricReporter);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
index ea452b2..82b9d58 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
@@ -64,15 +64,17 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
coreMetricManager = core.getCoreMetricManager();
metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
PluginInfo pluginInfo = createReporterPluginInfo();
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, coreMetricManager.getTag());
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
reporterName = pluginInfo.name;
- assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
- assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof SolrJmxReporter);
+ String taggedName = reporterName + "@" + coreMetricManager.getTag();
+ assertNotNull("reporter " + taggedName + " not present among " + reporters, reporters.get(taggedName));
+ assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof SolrJmxReporter);
- reporter = (SolrJmxReporter) reporters.get(reporterName);
+ reporter = (SolrJmxReporter) reporters.get(taggedName);
mBeanServer = reporter.getMBeanServer();
assertNotNull("MBean server not found.", mBeanServer);
}
@@ -144,7 +146,8 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
h.getCoreContainer().reload(h.getCore().getName());
PluginInfo pluginInfo = createReporterPluginInfo();
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
coreMetricManager.registerMetricProducer(scope, producer);
objects = mBeanServer.queryMBeans(null, null);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
new file mode 100644
index 0000000..91952b8
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.nio.file.Paths;
+import java.util.Map;
+
+import com.codahale.metrics.Metric;
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class SolrCloudReportersTest extends SolrCloudTestCase {
+ int leaderRegistries;
+ int clusterRegistries;
+
+
+ @BeforeClass
+ public static void configureDummyCluster() throws Exception {
+ configureCluster(0).configure();
+ }
+
+ @Before
+ public void closePreviousCluster() throws Exception {
+ shutdownCluster();
+ leaderRegistries = 0;
+ clusterRegistries = 0;
+ }
+
+ @Test
+ public void testExplicitConfiguration() throws Exception {
+ String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr-solrreporter.xml"), "UTF-8");
+ configureCluster(2)
+ .withSolrXml(solrXml).configure();
+ cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
+ System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
+ CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
+ .setMaxShardsPerNode(4)
+ .process(cluster.getSolrClient());
+ waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
+ Thread.sleep(15000);
+ cluster.getJettySolrRunners().forEach(jetty -> {
+ CoreContainer cc = jetty.getCoreContainer();
+ // verify registry names
+ for (String name : cc.getCoreNames()) {
+ SolrCore core = cc.getCore(name);
+ try {
+ String registryName = core.getCoreMetricManager().getRegistryName();
+ String leaderRegistryName = core.getCoreMetricManager().getLeaderRegistryName();
+ String coreName = core.getName();
+ String collectionName = core.getCoreDescriptor().getCollectionName();
+ String coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
+ String replicaName = coreName.split("_")[3];
+ String shardId = core.getCoreDescriptor().getCloudDescriptor().getShardId();
+
+ assertEquals("solr.core." + collectionName + "." + shardId + "." + replicaName, registryName);
+ assertEquals("solr.collection." + collectionName + "." + shardId + ".leader", leaderRegistryName);
+
+ } finally {
+ if (core != null) {
+ core.close();
+ }
+ }
+ }
+ SolrMetricManager metricManager = cc.getMetricManager();
+ Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
+ assertEquals(reporters.toString(), 1, reporters.size());
+ SolrMetricReporter reporter = reporters.get("test");
+ assertNotNull(reporter);
+ assertTrue(reporter.toString(), reporter instanceof SolrClusterReporter);
+ SolrClusterReporter sor = (SolrClusterReporter)reporter;
+ assertEquals(5, sor.getPeriod());
+ for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
+ reporters = metricManager.getReporters(registryName);
+ assertEquals(reporters.toString(), 1, reporters.size());
+ reporter = null;
+ for (String name : reporters.keySet()) {
+ if (name.startsWith("test")) {
+ reporter = reporters.get(name);
+ }
+ }
+ assertNotNull(reporter);
+ assertTrue(reporter.toString(), reporter instanceof SolrShardReporter);
+ SolrShardReporter srr = (SolrShardReporter)reporter;
+ assertEquals(5, srr.getPeriod());
+ }
+ for (String registryName : metricManager.registryNames(".*\\.leader")) {
+ leaderRegistries++;
+ reporters = metricManager.getReporters(registryName);
+ // no reporters registered for leader registry
+ assertEquals(reporters.toString(), 0, reporters.size());
+ // verify specific metrics
+ Map<String, Metric> metrics = metricManager.registry(registryName).getMetrics();
+ String key = "QUERY./select.requests.count";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ key = "UPDATE./update/json.requests.count";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ }
+ if (metricManager.registryNames().contains("solr.cluster")) {
+ clusterRegistries++;
+ Map<String,Metric> metrics = metricManager.registry("solr.cluster").getMetrics();
+ String key = "jvm.memory.heap.init.value";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ key = "leader.test_collection.shard1.UPDATE./update/json.requests.count.max";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ }
+ });
+ assertEquals("leaderRegistries", 2, leaderRegistries);
+ assertEquals("clusterRegistries", 1, clusterRegistries);
+ }
+
+ @Test
+ public void testDefaultPlugins() throws Exception {
+ String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr.xml"), "UTF-8");
+ configureCluster(2)
+ .withSolrXml(solrXml).configure();
+ cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
+ System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
+ CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
+ .setMaxShardsPerNode(4)
+ .process(cluster.getSolrClient());
+ waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
+ cluster.getJettySolrRunners().forEach(jetty -> {
+ CoreContainer cc = jetty.getCoreContainer();
+ SolrMetricManager metricManager = cc.getMetricManager();
+ Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
+ assertEquals(reporters.toString(), 0, reporters.size());
+ for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
+ reporters = metricManager.getReporters(registryName);
+ assertEquals(reporters.toString(), 0, reporters.size());
+ }
+ });
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
new file mode 100644
index 0000000..9ce3762
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Map;
+
+import com.codahale.metrics.Metric;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrCoreMetricManager;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *
+ */
+public class SolrShardReporterTest extends AbstractFullDistribZkTestBase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public SolrShardReporterTest() {
+ schemaString = "schema15.xml"; // we need a string id
+ }
+
+ @Override
+ public String getSolrXml() {
+ return "solr-solrreporter.xml";
+ }
+
+ @Test
+ public void test() throws Exception {
+ waitForRecoveriesToFinish("control_collection",
+ jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
+ waitForRecoveriesToFinish("collection1",
+ jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
+ printLayout();
+ // wait for at least two reports
+ Thread.sleep(10000);
+ ClusterState state = jettys.get(0).getCoreContainer().getZkController().getClusterState();
+ for (JettySolrRunner jetty : jettys) {
+ CoreContainer cc = jetty.getCoreContainer();
+ SolrMetricManager metricManager = cc.getMetricManager();
+ for (final String coreName : cc.getCoreNames()) {
+ CoreDescriptor cd = cc.getCoreDescriptor(coreName);
+ if (cd.getCloudDescriptor() == null) { // not a cloud collection
+ continue;
+ }
+ CloudDescriptor cloudDesc = cd.getCloudDescriptor();
+ DocCollection docCollection = state.getCollection(cloudDesc.getCollectionName());
+ String replicaName = SolrCoreMetricManager.parseReplicaName(cloudDesc.getCollectionName(), coreName);
+ if (replicaName == null) {
+ replicaName = cloudDesc.getCoreNodeName();
+ }
+ String registryName = SolrCoreMetricManager.createRegistryName(true,
+ cloudDesc.getCollectionName(), cloudDesc.getShardId(), replicaName, null);
+ String leaderRegistryName = SolrCoreMetricManager.createLeaderRegistryName(true,
+ cloudDesc.getCollectionName(), cloudDesc.getShardId());
+ boolean leader = cloudDesc.isLeader();
+ Slice slice = docCollection.getSlice(cloudDesc.getShardId());
+ int numReplicas = slice.getReplicas().size();
+ if (leader) {
+ assertTrue(metricManager.registryNames() + " doesn't contain " + leaderRegistryName,
+ metricManager.registryNames().contains(leaderRegistryName));
+ Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
+ metrics.forEach((k, v) -> {
+ assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
+ v instanceof AggregateMetric);
+ AggregateMetric am = (AggregateMetric)v;
+ if (!k.startsWith("REPLICATION.peerSync")) {
+ assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), numReplicas, am.size());
+ }
+ });
+ } else {
+ assertFalse(metricManager.registryNames() + " contains " + leaderRegistryName +
+ " but it's not a leader!",
+ metricManager.registryNames().contains(leaderRegistryName));
+ Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
+ metrics.forEach((k, v) -> {
+ assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
+ v instanceof AggregateMetric);
+ AggregateMetric am = (AggregateMetric)v;
+ if (!k.startsWith("REPLICATION.peerSync")) {
+ assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), 1, am.size());
+ }
+ });
+ }
+ assertTrue(metricManager.registryNames() + " doesn't contain " + registryName,
+ metricManager.registryNames().contains(registryName));
+ }
+ }
+ SolrMetricManager metricManager = controlJetty.getCoreContainer().getMetricManager();
+ assertTrue(metricManager.registryNames().contains("solr.cluster"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
index e39ad6e..8717ad6 100644
--- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
@@ -17,12 +17,20 @@
package org.apache.solr.util.stats;
+import java.util.Collections;
+import java.util.Map;
import java.util.concurrent.TimeUnit;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.AggregateMetric;
import org.junit.Test;
public class MetricUtilsTest extends SolrTestCaseJ4 {
@@ -36,7 +44,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
timer.update(Math.abs(random().nextInt()) + 1, TimeUnit.NANOSECONDS);
}
// obtain timer metrics
- NamedList lst = MetricUtils.timerToNamedList(timer);
+ NamedList lst = new NamedList(MetricUtils.timerToMap(timer, false));
// check that expected metrics were obtained
assertEquals(14, lst.size());
final Snapshot snapshot = timer.getSnapshot();
@@ -52,5 +60,49 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
assertEquals(MetricUtils.nsToMs(snapshot.get999thPercentile()), lst.get("p999_ms"));
}
+ @Test
+ public void testMetrics() throws Exception {
+ MetricRegistry registry = new MetricRegistry();
+ Counter counter = registry.counter("counter");
+ counter.inc();
+ Timer timer = registry.timer("timer");
+ Timer.Context ctx = timer.time();
+ Thread.sleep(150);
+ ctx.stop();
+ Meter meter = registry.meter("meter");
+ meter.mark();
+ Histogram histogram = registry.histogram("histogram");
+ histogram.update(10);
+ AggregateMetric am = new AggregateMetric();
+ registry.register("aggregate", am);
+ am.set("foo", 10);
+ am.set("bar", 1);
+ am.set("bar", 2);
+ MetricUtils.toNamedMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
+ false, false, (k, v) -> {
+ if (k.startsWith("counter")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("timer")) {
+ assertEquals(1L, v.get("count"));
+ assertTrue(((Number)v.get("min_ms")).intValue() > 100);
+ } else if (k.startsWith("meter")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("histogram")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("aggregate")) {
+ assertEquals(2, v.get("count"));
+ Map<String, Object> values = (Map<String, Object>)v.get("values");
+ assertNotNull(values);
+ assertEquals(2, values.size());
+ Map<String, Object> update = (Map<String, Object>)values.get("foo");
+ assertEquals(10, update.get("value"));
+ assertEquals(1, update.get("updateCount"));
+ update = (Map<String, Object>)values.get("bar");
+ assertEquals(2, update.get("value"));
+ assertEquals(2, update.get("updateCount"));
+ }
+ });
+ }
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
index 67274c2..310c282 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
@@ -112,8 +112,8 @@ public class BinaryRequestWriter extends RequestWriter {
/*
* A hack to get access to the protected internal buffer and avoid an additional copy
*/
- class BAOS extends ByteArrayOutputStream {
- byte[] getbuf() {
+ public static class BAOS extends ByteArrayOutputStream {
+ public byte[] getbuf() {
return super.buf;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
index da94162..132a1a8 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
@@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandles;
import java.util.Map;
import java.util.HashMap;
+import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -38,15 +39,27 @@ public class SolrClientCache implements Serializable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final Map<String, SolrClient> solrClients = new HashMap<>();
+ private final HttpClient httpClient;
+
+ public SolrClientCache() {
+ httpClient = null;
+ }
+
+ public SolrClientCache(HttpClient httpClient) {
+ this.httpClient = httpClient;
+ }
public synchronized CloudSolrClient getCloudSolrClient(String zkHost) {
CloudSolrClient client;
if (solrClients.containsKey(zkHost)) {
client = (CloudSolrClient) solrClients.get(zkHost);
} else {
- client = new CloudSolrClient.Builder()
- .withZkHost(zkHost)
- .build();
+ CloudSolrClient.Builder builder = new CloudSolrClient.Builder()
+ .withZkHost(zkHost);
+ if (httpClient != null) {
+ builder = builder.withHttpClient(httpClient);
+ }
+ client = builder.build();
client.connect();
solrClients.put(zkHost, client);
}
@@ -59,8 +72,11 @@ public class SolrClientCache implements Serializable {
if (solrClients.containsKey(host)) {
client = (HttpSolrClient) solrClients.get(host);
} else {
- client = new HttpSolrClient.Builder(host)
- .build();
+ HttpSolrClient.Builder builder = new HttpSolrClient.Builder(host);
+ if (httpClient != null) {
+ builder = builder.withHttpClient(httpClient);
+ }
+ client = builder.build();
solrClients.put(host, client);
}
return client;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
index b2174cd..de7c620 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
@@ -251,8 +251,8 @@ public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
// assert initial metrics
SolrMetricManager metricManager = cores.getMetricManager();
- String core0RegistryName = SolrCoreMetricManager.createRegistryName(null, "core0");
- String core1RegistryName = SolrCoreMetricManager.createRegistryName(null, "core1");
+ String core0RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null, null, "core0");
+ String core1RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null,null, "core1");
MetricRegistry core0Registry = metricManager.registry(core0RegistryName);
MetricRegistry core1Registry = metricManager.registry(core1RegistryName);
[39/50] [abbrv] lucene-solr:jira/solr-6736: SOLR-10254:
significantTerms Streaming Expression should work in non-SolrCloud mode
Posted by is...@apache.org.
SOLR-10254: significantTerms Streaming Expression should work in non-SolrCloud mode
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/682c6a7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/682c6a7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/682c6a7d
Branch: refs/heads/jira/solr-6736
Commit: 682c6a7d5145129e8ae01ff00505ddf5a564d396
Parents: 8756be0
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Mar 8 21:10:56 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 8 21:11:26 2017 -0500
----------------------------------------------------------------------
.../org/apache/solr/handler/StreamHandler.java | 27 +++
.../solrj/io/stream/SignificantTermsStream.java | 49 +---
.../client/solrj/io/stream/TupleStream.java | 94 ++++++++
.../solrj/io/stream/StreamExpressionTest.java | 234 +++++++++++++------
4 files changed, 285 insertions(+), 119 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 31b37e7..06e59b6 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -18,6 +18,7 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -246,6 +247,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
int worker = params.getInt("workerID", 0);
int numWorkers = params.getInt("numWorkers", 1);
StreamContext context = new StreamContext();
+ context.put("shards", getCollectionShards(params));
context.workerID = worker;
context.numWorkers = numWorkers;
context.setSolrClientCache(clientCache);
@@ -509,4 +511,29 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
return tuple;
}
}
+
+ private Map<String, List<String>> getCollectionShards(SolrParams params) {
+
+ Map<String, List<String>> collectionShards = new HashMap();
+ Iterator<String> paramsIt = params.getParameterNamesIterator();
+ while(paramsIt.hasNext()) {
+ String param = paramsIt.next();
+ if(param.indexOf(".shards") > -1) {
+ String collection = param.split("\\.")[0];
+ String shardString = params.get(param);
+ String[] shards = shardString.split(",");
+ List<String> shardList = new ArrayList();
+ for(String shard : shards) {
+ shardList.add(shard);
+ }
+ collectionShards.put(collection, shardList);
+ }
+ }
+
+ if(collectionShards.size() > 0) {
+ return collectionShards;
+ } else {
+ return null;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 87b5a9f..2acee51 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -74,12 +74,9 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
protected transient SolrClientCache cache;
protected transient boolean isCloseCache;
- protected transient CloudSolrClient cloudSolrClient;
-
protected transient StreamContext streamContext;
protected ExecutorService executorService;
-
public SignificantTermsStream(String zkHost,
String collectionName,
Map params,
@@ -168,12 +165,12 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
String zkHost = null;
if(null == zkHostExpression){
zkHost = factory.getCollectionZkHost(collectionName);
- }
- else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
+ } else if(zkHostExpression.getParameter() instanceof StreamExpressionValue) {
zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
}
- if(null == zkHost){
- throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
+
+ if(zkHost == null){
+ zkHost = factory.getDefaultZkHost();
}
// We've got all the required items
@@ -238,47 +235,13 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
isCloseCache = false;
}
- this.cloudSolrClient = this.cache.getCloudSolrClient(zkHost);
- this.executorService = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrjNamedThreadFactory("FeaturesSelectionStream"));
+ this.executorService = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrjNamedThreadFactory("SignificantTermsStream"));
}
public List<TupleStream> children() {
return null;
}
- private List<String> getShardUrls() throws IOException {
- try {
- ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
-
- Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, false);
-
- ClusterState clusterState = zkStateReader.getClusterState();
- Set<String> liveNodes = clusterState.getLiveNodes();
-
- List<String> baseUrls = new ArrayList<>();
- for(Slice slice : slices) {
- Collection<Replica> replicas = slice.getReplicas();
- List<Replica> shuffler = new ArrayList<>();
- for(Replica replica : replicas) {
- if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
- shuffler.add(replica);
- }
- }
-
- Collections.shuffle(shuffler, new Random());
- Replica rep = shuffler.get(0);
- ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
- String url = zkProps.getCoreUrl();
- baseUrls.add(url);
- }
-
- return baseUrls;
-
- } catch (Exception e) {
- throw new IOException(e);
- }
- }
-
private List<Future<NamedList>> callShards(List<String> baseUrls) throws IOException {
List<Future<NamedList>> futures = new ArrayList<>();
@@ -326,7 +289,7 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
Map<String, int[]> mergeFreqs = new HashMap<>();
long numDocs = 0;
long resultCount = 0;
- for (Future<NamedList> getTopTermsCall : callShards(getShardUrls())) {
+ for (Future<NamedList> getTopTermsCall : callShards(getShards(zkHost, collection, streamContext))) {
NamedList resp = getTopTermsCall.get();
List<String> terms = (List<String>)resp.get("sterms");
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
index 49a806f..ceea6af 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
@@ -19,9 +19,16 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.Closeable;
import java.io.IOException;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.List;
+import java.util.Random;
+import java.util.Set;
import java.util.UUID;
+import java.util.Map;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
@@ -29,6 +36,14 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.Aliases;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkCoreNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.StrUtils;
public abstract class TupleStream implements Closeable, Serializable, MapWriter {
@@ -84,4 +99,83 @@ public abstract class TupleStream implements Closeable, Serializable, MapWriter
public UUID getStreamNodeId(){
return streamNodeId;
}
+
+ public static List<String> getShards(String zkHost,
+ String collection,
+ StreamContext streamContext)
+ throws IOException {
+ Map<String, List<String>> shardsMap = null;
+ List<String> shards = new ArrayList();
+
+ if(streamContext != null) {
+ shardsMap = (Map<String, List<String>>)streamContext.get("shards");
+ }
+
+ if(shardsMap != null) {
+ //Manual Sharding
+ shards = shardsMap.get(collection);
+ } else {
+ //SolrCloud Sharding
+ CloudSolrClient cloudSolrClient = streamContext.getSolrClientCache().getCloudSolrClient(zkHost);
+ ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
+ ClusterState clusterState = zkStateReader.getClusterState();
+ Collection<Slice> slices = getSlices(collection, zkStateReader, true);
+ Set<String> liveNodes = clusterState.getLiveNodes();
+ for(Slice slice : slices) {
+ Collection<Replica> replicas = slice.getReplicas();
+ List<Replica> shuffler = new ArrayList<>();
+ for(Replica replica : replicas) {
+ if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName()))
+ shuffler.add(replica);
+ }
+
+ Collections.shuffle(shuffler, new Random());
+ Replica rep = shuffler.get(0);
+ ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
+ String url = zkProps.getCoreUrl();
+ shards.add(url);
+ }
+ }
+
+ return shards;
+ }
+
+ public static Collection<Slice> getSlices(String collectionName,
+ ZkStateReader zkStateReader,
+ boolean checkAlias) throws IOException {
+ ClusterState clusterState = zkStateReader.getClusterState();
+
+ Map<String, DocCollection> collectionsMap = clusterState.getCollectionsMap();
+
+ // Check collection case sensitive
+ if(collectionsMap.containsKey(collectionName)) {
+ return collectionsMap.get(collectionName).getActiveSlices();
+ }
+
+ // Check collection case insensitive
+ for(String collectionMapKey : collectionsMap.keySet()) {
+ if(collectionMapKey.equalsIgnoreCase(collectionName)) {
+ return collectionsMap.get(collectionMapKey).getActiveSlices();
+ }
+ }
+
+ if(checkAlias) {
+ // check for collection alias
+ Aliases aliases = zkStateReader.getAliases();
+ String alias = aliases.getCollectionAlias(collectionName);
+ if (alias != null) {
+ Collection<Slice> slices = new ArrayList<>();
+
+ List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
+ for (String aliasCollectionName : aliasList) {
+ // Add all active slices for this alias collection
+ slices.addAll(collectionsMap.get(aliasCollectionName).getActiveSlices());
+ }
+
+ return slices;
+ }
+ }
+
+ throw new IOException("Slices not found for " + collectionName);
+ }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 30b7056..c61e443 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -335,7 +335,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
tuples = getTuples(stream);
assert(tuples.size() == 4);
- assertOrder(tuples, 4,3,1,2);
+ assertOrder(tuples, 4, 3, 1, 2);
// Basic w/multi comp
expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
@@ -1577,7 +1577,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
List<Tuple> tuples = getTuples(pstream);
assert(tuples.size() == 5);
- assertOrder(tuples, 0,1,3,4,6);
+ assertOrder(tuples, 0, 1, 3, 4, 6);
//Test the eofTuples
@@ -4712,8 +4712,6 @@ public class StreamExpressionTest extends SolrCloudTestCase {
@Test
public void testSignificantTermsStream() throws Exception {
- Assume.assumeTrue(!useAlias);
-
UpdateRequest updateRequest = new UpdateRequest();
for (int i = 0; i < 5000; i++) {
updateRequest.add(id, "a"+i, "test_t", "a b c d m l");
@@ -4742,106 +4740,186 @@ public class StreamExpressionTest extends SolrCloudTestCase {
StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
+ .withDefaultZkHost(cluster.getZkServer().getZkAddress())
.withFunctionName("significantTerms", SignificantTermsStream.class);
- String significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ StreamContext streamContext = new StreamContext();
+ SolrClientCache cache = new SolrClientCache();
+ streamContext.setSolrClientCache(cache);
+ try {
- assert(tuples.size() == 3);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ String significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 3);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(1).get("term").equals("m"));
- assertTrue(tuples.get(1).getLong("background") == 5500);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
- assertTrue(tuples.get(2).get("term").equals("d"));
- assertTrue(tuples.get(2).getLong("background") == 5600);
- assertTrue(tuples.get(2).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("m"));
+ assertTrue(tuples.get(1).getLong("background") == 5500);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
- //Test maxDocFreq
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=2650, minTermLength=1)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ assertTrue(tuples.get(2).get("term").equals("d"));
+ assertTrue(tuples.get(2).getLong("background") == 5600);
+ assertTrue(tuples.get(2).getLong("foreground") == 5000);
- assert(tuples.size() == 1);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ //Test maxDocFreq
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=2650, minTermLength=1)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- //Test maxDocFreq percentage
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=\".45\", minTermLength=1)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 1);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ //Test maxDocFreq percentage
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=\".45\", minTermLength=1)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- //Test min doc freq
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 3);
+ //Test min doc freq
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assertTrue(tuples.get(0).get("term").equals("m"));
- assertTrue(tuples.get(0).getLong("background") == 5500);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assert (tuples.size() == 3);
- assertTrue(tuples.get(1).get("term").equals("d"));
- assertTrue(tuples.get(1).getLong("background") == 5600);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(2).get("term").equals("c"));
- assertTrue(tuples.get(2).getLong("background") == 5900);
- assertTrue(tuples.get(2).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(2).get("term").equals("c"));
+ assertTrue(tuples.get(2).getLong("background") == 5900);
+ assertTrue(tuples.get(2).getLong("foreground") == 5000);
- //Test min doc freq percent
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\".478\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 1);
+ //Test min doc freq percent
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\".478\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assertTrue(tuples.get(0).get("term").equals("c"));
- assertTrue(tuples.get(0).getLong("background") == 5900);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("c"));
+ assertTrue(tuples.get(0).getLong("background") == 5900);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- //Test limit
+ //Test limit
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assert(tuples.size() == 2);
+ assert (tuples.size() == 2);
- assertTrue(tuples.get(0).get("term").equals("m"));
- assertTrue(tuples.get(0).getLong("background") == 5500);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(1).get("term").equals("d"));
- assertTrue(tuples.get(1).getLong("background") == 5600);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
- //Test term length
+ //Test term length
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=2)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 0);
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=2)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 0);
- }
+
+ //Test with shards parameter
+ List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
+
+ Map<String, List<String>> shardsMap = new HashMap();
+ shardsMap.put("myCollection", shardUrls);
+ StreamContext context = new StreamContext();
+ context.put("shards", shardsMap);
+ context.setSolrClientCache(cache);
+ significantTerms = "significantTerms(myCollection, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(context);
+ tuples = getTuples(stream);
+
+ assert (tuples.size() == 2);
+
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
+
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+
+ //Exercise the /stream handler
+
+ //Add the shards http parameter for myCollection
+ StringBuilder buf = new StringBuilder();
+ for (String shardUrl : shardUrls) {
+ if (buf.length() > 0) {
+ buf.append(",");
+ }
+ buf.append(shardUrl);
+ }
+
+ ModifiableSolrParams solrParams = new ModifiableSolrParams();
+ solrParams.add("qt", "/stream");
+ solrParams.add("expr", significantTerms);
+ solrParams.add("myCollection.shards", buf.toString());
+ SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
+ tuples = getTuples(solrStream);
+ assert (tuples.size() == 2);
+
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
+
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+
+ //Add a negative test to prove that it cannot find slices if shards parameter is removed
+
+ try {
+ ModifiableSolrParams solrParamsBad = new ModifiableSolrParams();
+ solrParamsBad.add("qt", "/stream");
+ solrParamsBad.add("expr", significantTerms);
+ solrStream = new SolrStream(shardUrls.get(0), solrParamsBad);
+ tuples = getTuples(solrStream);
+ throw new Exception("Exception should have been thrown above");
+ } catch (IOException e) {
+ assertTrue(e.getMessage().contains("Slices not found for myCollection"));
+ }
+ } finally {
+ cache.close();
+ }
+
+ }
+
@Test
public void testComplementStream() throws Exception {
@@ -4920,12 +4998,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}
protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
- tupleStream.open();
List<Tuple> tuples = new ArrayList<Tuple>();
- for(Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
- tuples.add(t);
+
+ try {
+ tupleStream.open();
+ for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
+ tuples.add(t);
+ }
+ } finally {
+ tupleStream.close();
}
- tupleStream.close();
return tuples;
}
protected boolean assertOrder(List<Tuple> tuples, int... ids) throws Exception {